cc 149 arch/alpha/kernel/sys_jensen.c unsigned int cc; cc 151 arch/alpha/kernel/sys_jensen.c __asm __volatile("rpcc %0" : "=r"(cc)); cc 154 arch/alpha/kernel/sys_jensen.c if (cc - last_msg > ((JENSEN_CYCLES_PER_SEC) * 3) || cc 157 arch/alpha/kernel/sys_jensen.c irq, count, cc-last_cc, get_irq_regs()->pc); cc 159 arch/alpha/kernel/sys_jensen.c last_msg = cc; cc 162 arch/alpha/kernel/sys_jensen.c last_cc = cc; cc 286 arch/alpha/kernel/time.c validate_cc_value(unsigned long cc) cc 321 arch/alpha/kernel/time.c return cc; cc 325 arch/alpha/kernel/time.c return cc; cc 327 arch/alpha/kernel/time.c if (cc < cpu_hz[index].min - deviation cc 328 arch/alpha/kernel/time.c || cc > cpu_hz[index].max + deviation) cc 331 arch/alpha/kernel/time.c return cc; cc 346 arch/alpha/kernel/time.c int cc, count = 0; cc 362 arch/alpha/kernel/time.c cc = rpcc(); cc 366 arch/alpha/kernel/time.c cc = rpcc() - cc; cc 372 arch/alpha/kernel/time.c return ((long)cc * PIT_TICK_RATE) / (CALIBRATE_LATCH + 1); cc 192 arch/arm/include/asm/assembler.h .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo cc 449 arch/arm/include/asm/assembler.h .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo cc 71 arch/arm/probes/kprobes/actions-thumb.c int cc = (insn >> 22) & 0xf; cc 72 arch/arm/probes/kprobes/actions-thumb.c asi->insn_check_cc = probes_condition_checks[cc]; cc 425 arch/arm/probes/kprobes/actions-thumb.c int cc = (insn >> 8) & 0xf; cc 426 arch/arm/probes/kprobes/actions-thumb.c asi->insn_check_cc = probes_condition_checks[cc]; cc 1176 arch/arm/probes/kprobes/test-arm.c #define COPROCESSOR_INSTRUCTIONS_ST_LD(two,cc) \ cc 1208 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##daf0001) " @ stc"two" 0, cr0, [r15, #4]!") \ cc 1209 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##d2f0001) " @ stc"two" 0, cr0, [r15, #-4]!") \ cc 1210 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##caf0001) " @ stc"two" 0, cr0, [r15], #4") \ cc 1211 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c2f0001) " @ stc"two" 0, cr0, [r15], #-4") \ cc 1215 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##def0001) " @ stc"two"l 0, cr0, [r15, #4]!") \ cc 1216 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##d6f0001) " @ stc"two"l 0, cr0, [r15, #-4]!") \ cc 1217 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##cef0001) " @ stc"two"l 0, cr0, [r15], #4") \ cc 1218 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c6f0001) " @ stc"two"l 0, cr0, [r15], #-4") \ cc 1222 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##dbf0001) " @ ldc"two" 0, cr0, [r15, #4]!") \ cc 1223 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##d3f0001) " @ ldc"two" 0, cr0, [r15, #-4]!") \ cc 1224 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##cbf0001) " @ ldc"two" 0, cr0, [r15], #4") \ cc 1225 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c3f0001) " @ ldc"two" 0, cr0, [r15], #-4") \ cc 1229 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##dff0001) " @ ldc"two"l 0, cr0, [r15, #4]!") \ cc 1230 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##d7f0001) " @ ldc"two"l 0, cr0, [r15, #-4]!") \ cc 1231 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##cff0001) " @ ldc"two"l 0, cr0, [r15], #4") \ cc 1232 arch/arm/probes/kprobes/test-arm.c 
TEST_UNSUPPORTED(__inst_arm(0x##cc##c7f0001) " @ ldc"two"l 0, cr0, [r15], #-4") \ cc 1235 arch/arm/probes/kprobes/test-arm.c #define COPROCESSOR_INSTRUCTIONS_MC_MR(two,cc) \ cc 1239 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c4f00f0) " @ mcrr"two" 0, 15, r0, r15, cr0") \ cc 1240 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c40ff0f) " @ mcrr"two" 15, 0, r15, r0, cr15") \ cc 1243 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c5f00f0) " @ mrrc"two" 0, 15, r0, r15, cr0") \ cc 1244 arch/arm/probes/kprobes/test-arm.c TEST_UNSUPPORTED(__inst_arm(0x##cc##c50ff0f) " @ mrrc"two" 15, 0, r15, r0, cr15") \ cc 1010 arch/arm/probes/kprobes/test-core.c static unsigned long test_check_cc(int cc, unsigned long cpsr) cc 1012 arch/arm/probes/kprobes/test-core.c int ret = arm_check_condition(cc << 28, cpsr); cc 1034 arch/arm/probes/kprobes/test-core.c int cc = current_instruction >> 28; cc 1036 arch/arm/probes/kprobes/test-core.c probe_should_run = test_check_cc(cc, cpsr) != 0; cc 1043 arch/arm/probes/kprobes/test-core.c int cc = (current_instruction >> kprobe_test_cc_position) & 0xf; cc 1044 arch/arm/probes/kprobes/test-core.c probe_should_run = test_check_cc(cc, cpsr) != 0; cc 479 arch/ia64/include/asm/pal.h cc : 1, /* Cache check */ cc 577 arch/ia64/include/asm/pal.h cc : 1, /* Error occurred cc 682 arch/ia64/include/asm/pal.h #define pmci_proc_cache_check pme_processor.cc cc 741 arch/ia64/include/asm/pal.h #define pmci_bus_cache_cache_transfer pme_bus.cc cc 403 arch/ia64/kernel/mca_drv.c if (psp->tc || psp->cc || psp->rc || psp->uc) cc 606 arch/ia64/kernel/mca_drv.c } else if (psp->cc && !psp->bc) { /* Cache error */ cc 680 arch/ia64/kernel/mca_drv.c if (psp->tc && !(psp->cc || psp->bc || psp->rc || psp->uc)) cc 691 arch/ia64/kernel/mca_drv.c if (psp->cc == 0 && (psp->bc == 0 || pbci == NULL)) cc 200 arch/microblaze/kernel/timer.c .cc = NULL, cc 203 arch/microblaze/kernel/timer.c static u64 xilinx_cc_read(const struct cyclecounter *cc) cc 57 arch/mips/bcm47xx/serial.c struct bcma_drv_cc *cc = &(bcm47xx_bus.bcma.bus.drv_cc); cc 61 arch/mips/bcm47xx/serial.c for (i = 0; i < cc->nr_serial_ports && cc 65 arch/mips/bcm47xx/serial.c bcma_port = &(cc->serial_ports[i]); cc 241 arch/mips/include/asm/octeon/cvmx-pci-defs.h uint32_t cc:24; cc 245 arch/mips/include/asm/octeon/cvmx-pci-defs.h uint32_t cc:24; cc 279 arch/mips/include/asm/octeon/cvmx-pciercx-defs.h __BITFIELD_FIELD(uint32_t cc:1, cc 78 arch/mips/include/asm/txx9/tx3927.h endian_def_b4(cc, scc, rpli, rid); cc 796 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int cc : 3, cc 847 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int cc : 3, cc 869 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int cc : 3, cc 189 arch/mips/math-emu/cp1emu.c (insn.mm_fp2_format.cc<<2) + op; cc 256 arch/mips/math-emu/cp1emu.c (insn.mm_fp4_format.cc << 2) + op; cc 402 arch/mips/math-emu/cp1emu.c mips32_insn.fp0_format.fd = insn.mm_fp4_format.cc << 2; cc 87 arch/powerpc/include/asm/icswx.h u8 cc; cc 481 arch/powerpc/include/asm/ps3av.h u8 cc:3; cc 568 arch/powerpc/platforms/powermac/smp.c struct device_node *cc = NULL; cc 575 arch/powerpc/platforms/powermac/smp.c for_each_node_by_name(cc, "i2c-hwclock") { cc 576 arch/powerpc/platforms/powermac/smp.c p = of_get_parent(cc); cc 582 arch/powerpc/platforms/powermac/smp.c pmac_tb_clock_chip_host = pmac_i2c_find_bus(cc); cc 585 arch/powerpc/platforms/powermac/smp.c reg = of_get_property(cc, "reg", NULL); cc 590 
arch/powerpc/platforms/powermac/smp.c if (of_device_is_compatible(cc,"pulsar-legacy-slewing")) { cc 594 arch/powerpc/platforms/powermac/smp.c } else if (of_device_is_compatible(cc, "cy28508")) { cc 243 arch/powerpc/xmon/ppc-dis.c int cc; cc 248 arch/powerpc/xmon/ppc-dis.c cc = value & 3; cc 249 arch/powerpc/xmon/ppc-dis.c printf("%s", cbnames[cc]); cc 3722 arch/powerpc/xmon/xmon.c int esel = i, cc = i; cc 3725 arch/powerpc/xmon/xmon.c cc = i / assoc; cc 3727 arch/powerpc/xmon/xmon.c mas2 = cc * 0x1000; cc 3745 arch/powerpc/xmon/xmon.c printf("%04x-%c", cc, 'A' + esel); cc 71 arch/s390/boot/pgm_check_info.c *p++ = hex_asc_lo(psw->cc); cc 42 arch/s390/include/asm/cmpxchg.h int cc; \ cc 47 arch/s390/include/asm/cmpxchg.h : [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2) \ cc 51 arch/s390/include/asm/cmpxchg.h !cc; \ cc 180 arch/s390/include/asm/cpu_mf.h int cc; cc 186 arch/s390/include/asm/cpu_mf.h : "=d" (cc) : "Q" (ctl) : "cc"); cc 187 arch/s390/include/asm/cpu_mf.h return cc; cc 194 arch/s390/include/asm/cpu_mf.h int cc; cc 200 arch/s390/include/asm/cpu_mf.h : "=d" (_content), "=d" (cc) : "d" (ctr) : "cc"); cc 202 arch/s390/include/asm/cpu_mf.h return cc; cc 209 arch/s390/include/asm/cpu_mf.h int cc; cc 211 arch/s390/include/asm/cpu_mf.h cc = __ecctr(ctr, &content); cc 212 arch/s390/include/asm/cpu_mf.h if (!cc) cc 214 arch/s390/include/asm/cpu_mf.h return cc; cc 229 arch/s390/include/asm/cpu_mf.h int cc; cc 235 arch/s390/include/asm/cpu_mf.h : "=d" (cc) cc 238 arch/s390/include/asm/cpu_mf.h return cc; cc 244 arch/s390/include/asm/cpu_mf.h int cc = 1; cc 251 arch/s390/include/asm/cpu_mf.h : "+d" (cc), "+Q" (*info)); cc 252 arch/s390/include/asm/cpu_mf.h return cc ? -EINVAL : 0; cc 258 arch/s390/include/asm/cpu_mf.h int cc; cc 260 arch/s390/include/asm/cpu_mf.h cc = 1; cc 267 arch/s390/include/asm/cpu_mf.h : "+d" (cc), "+a" (req) cc 271 arch/s390/include/asm/cpu_mf.h return cc ? 
-EINVAL : 0; cc 80 arch/s390/include/asm/nmi.h u64 cc : 1; /* 47 clock comparator validity */ cc 124 arch/s390/include/asm/page.h int cc; cc 130 arch/s390/include/asm/page.h : "=d" (cc) : "a" (addr) : "cc"); cc 131 arch/s390/include/asm/page.h return cc; cc 100 arch/s390/include/asm/pci_io.h int cc; cc 102 arch/s390/include/asm/pci_io.h cc = zpci_load(&data, src, len); cc 103 arch/s390/include/asm/pci_io.h if (cc) cc 121 arch/s390/include/asm/pci_io.h return cc; cc 44 arch/s390/include/asm/ptrace.h unsigned long cc : 2; /* Condition Code */ cc 43 arch/s390/include/asm/scsw.h __u32 cc : 2; cc 85 arch/s390/include/asm/scsw.h u32 cc:2; cc 121 arch/s390/include/asm/scsw.h u32 cc:2; cc 270 arch/s390/include/asm/scsw.h return scsw->tm.cc; cc 272 arch/s390/include/asm/scsw.h return scsw->cmd.cc; cc 581 arch/s390/include/asm/scsw.h (scsw->cmd.cc != 3); cc 594 arch/s390/include/asm/scsw.h (scsw->cmd.cc != 3); cc 749 arch/s390/include/asm/scsw.h (scsw->tm.cc != 3); cc 762 arch/s390/include/asm/scsw.h (scsw->tm.cc != 3); cc 961 arch/s390/include/asm/scsw.h return (scsw->cmd.cc != 0) || (scsw->cmd.stctl != cc 974 arch/s390/include/asm/scsw.h return (scsw->tm.cc != 0) || (scsw->tm.stctl != cc 45 arch/s390/include/asm/sigp.h int cc; cc 51 arch/s390/include/asm/sigp.h : "=d" (cc), "+d" (reg1) : "d" (addr), "a" (order) : "cc"); cc 53 arch/s390/include/asm/sigp.h return cc; cc 60 arch/s390/include/asm/sigp.h int cc; cc 62 arch/s390/include/asm/sigp.h cc = ____pcpu_sigp(addr, order, parm, &_status); cc 63 arch/s390/include/asm/sigp.h if (status && cc == SIGP_CC_STATUS_STORED) cc 65 arch/s390/include/asm/sigp.h return cc; cc 25 arch/s390/include/asm/timex.h int cc; cc 31 arch/s390/include/asm/timex.h : "=d" (cc) : "Q" (time) : "cc"); cc 32 arch/s390/include/asm/timex.h return cc; cc 37 arch/s390/include/asm/timex.h int cc; cc 43 arch/s390/include/asm/timex.h : "=d" (cc), "=Q" (*time) : : "cc"); cc 44 arch/s390/include/asm/timex.h return cc; cc 59 arch/s390/include/asm/uv.h int cc; cc 66 arch/s390/include/asm/uv.h : [cc] "=d" (cc) cc 69 arch/s390/include/asm/uv.h return cc; cc 24 arch/s390/include/uapi/asm/vtoc.h __u16 cc; cc 31 arch/s390/include/uapi/asm/vtoc.h __u16 cc; cc 158 arch/s390/kernel/dumpstack.c psw->key, psw->mcheck, psw->wait, psw->pstate, psw->as, psw->cc, psw->pm); cc 122 arch/s390/kernel/smp.c int cc; cc 125 arch/s390/kernel/smp.c cc = __pcpu_sigp(addr, order, parm, NULL); cc 126 arch/s390/kernel/smp.c if (cc != SIGP_CC_BUSY) cc 127 arch/s390/kernel/smp.c return cc; cc 134 arch/s390/kernel/smp.c int cc, retry; cc 137 arch/s390/kernel/smp.c cc = __pcpu_sigp(pcpu->address, order, parm, NULL); cc 138 arch/s390/kernel/smp.c if (cc != SIGP_CC_BUSY) cc 143 arch/s390/kernel/smp.c return cc; cc 352 arch/s390/kernel/smp.c int cc; cc 356 arch/s390/kernel/smp.c cc = __pcpu_sigp(0, SIGP_SET_MULTI_THREADING, mtid, NULL); cc 357 arch/s390/kernel/smp.c if (cc == 0) { cc 364 arch/s390/kernel/smp.c return cc; cc 401 arch/s390/kernel/sthyi.c int cc; cc 407 arch/s390/kernel/sthyi.c : [cc] "=d" (cc), "=d" (rcode) cc 411 arch/s390/kernel/sthyi.c return cc; cc 222 arch/s390/kernel/uprobes.c psw_bits((regs)->psw).cc = 1; \ cc 224 arch/s390/kernel/uprobes.c psw_bits((regs)->psw).cc = 2; \ cc 226 arch/s390/kernel/uprobes.c psw_bits((regs)->psw).cc = 0; \ cc 364 arch/s390/kvm/intercept.c u64 code, addr, cc = 0, rc = 0; cc 382 arch/s390/kvm/intercept.c cc = 3; cc 394 arch/s390/kvm/intercept.c cc = sthyi_fill(sctns, &rc); cc 397 arch/s390/kvm/intercept.c if (!cc) { cc 407 arch/s390/kvm/intercept.c 
kvm_s390_set_psw_cc(vcpu, cc); cc 322 arch/s390/kvm/kvm-s390.c int cc; cc 329 arch/s390/kvm/kvm-s390.c : "=d" (cc) cc 332 arch/s390/kvm/kvm-s390.c return cc == 0; cc 163 arch/s390/kvm/kvm-s390.h static inline void kvm_s390_set_psw_cc(struct kvm_vcpu *vcpu, unsigned long cc) cc 166 arch/s390/kvm/kvm-s390.h vcpu->arch.sie_block->gpsw.mask |= cc << 44; cc 1439 arch/s390/kvm/priv.c int ret = 0, cc = 0; cc 1460 arch/s390/kvm/priv.c cc = 1; cc 1480 arch/s390/kvm/priv.c cc = 1; /* Write not permitted ==> read-only */ cc 1481 arch/s390/kvm/priv.c kvm_s390_set_psw_cc(vcpu, cc); cc 268 arch/s390/lib/string.c int cc; cc 274 arch/s390/lib/string.c : "=&d" (cc), "+a" (r2), "+a" (r3), cc 276 arch/s390/lib/string.c return cc; cc 294 arch/s390/lib/string.c int cc; cc 296 arch/s390/lib/string.c cc = clcle(s1, l2, s2, l2); cc 297 arch/s390/lib/string.c if (!cc) cc 374 arch/s390/mm/pageattr.c int cc; cc 381 arch/s390/mm/pageattr.c : "=d" (cc), "+a" (addr) : : "cc"); cc 382 arch/s390/mm/pageattr.c return cc == 0; cc 864 arch/s390/mm/pgtable.c int cc = 0; cc 879 arch/s390/mm/pgtable.c cc = page_reset_referenced(paddr); cc 881 arch/s390/mm/pgtable.c return cc; cc 895 arch/s390/mm/pgtable.c cc = page_reset_referenced(paddr); cc 897 arch/s390/mm/pgtable.c pgste_val(new) |= ((unsigned long) cc << 53) & PGSTE_HR_BIT; cc 900 arch/s390/mm/pgtable.c cc |= (pgste_val(old) & (PGSTE_GR_BIT | PGSTE_GC_BIT)) >> 49; cc 907 arch/s390/mm/pgtable.c return cc; cc 129 arch/s390/pci/pci.c u8 cc, status; cc 131 arch/s390/pci/pci.c cc = zpci_mod_fc(req, &fib, &status); cc 132 arch/s390/pci/pci.c if (cc == 3) /* Function already gone. */ cc 133 arch/s390/pci/pci.c cc = 0; cc 134 arch/s390/pci/pci.c return cc ? -EIO : 0; cc 142 arch/s390/pci/pci.c u8 cc, status; cc 158 arch/s390/pci/pci.c cc = zpci_mod_fc(req, &fib, &status); cc 159 arch/s390/pci/pci.c if (cc) { cc 163 arch/s390/pci/pci.c return cc ? -EIO : 0; cc 171 arch/s390/pci/pci.c u8 cc, status; cc 177 arch/s390/pci/pci.c cc = zpci_mod_fc(req, &fib, &status); cc 178 arch/s390/pci/pci.c if (cc == 3) /* Function already gone. */ cc 179 arch/s390/pci/pci.c cc = 0; cc 181 arch/s390/pci/pci.c if (!cc) { cc 185 arch/s390/pci/pci.c return cc ? 
-EIO : 0; cc 52 arch/s390/pci/pci_clp.c int cc = 3; cc 60 arch/s390/pci/pci_clp.c : [cc] "+d" (cc), [mask] "=d" (mask) : [cmd] "a" (1) cc 63 arch/s390/pci/pci_clp.c return cc; cc 73 arch/s390/pci/pci_clp.c int cc = 3; cc 81 arch/s390/pci/pci_clp.c : [cc] "+d" (cc), [ign] "=d" (ignored), "+m" (*req) cc 84 arch/s390/pci/pci_clp.c return cc; cc 20 arch/s390/pci/pci_insn.c static inline void zpci_err_insn(u8 cc, u8 status, u64 req, u64 offset) cc 25 arch/s390/pci/pci_insn.c u8 cc; cc 27 arch/s390/pci/pci_insn.c } __packed data = {req, offset, cc, status}; cc 35 arch/s390/pci/pci_insn.c u8 cc; cc 41 arch/s390/pci/pci_insn.c : [cc] "=d" (cc), [req] "+d" (req), [fib] "+Q" (*fib) cc 44 arch/s390/pci/pci_insn.c return cc; cc 49 arch/s390/pci/pci_insn.c u8 cc; cc 52 arch/s390/pci/pci_insn.c cc = __mpcifc(req, fib, status); cc 53 arch/s390/pci/pci_insn.c if (cc == 2) cc 55 arch/s390/pci/pci_insn.c } while (cc == 2); cc 57 arch/s390/pci/pci_insn.c if (cc) cc 58 arch/s390/pci/pci_insn.c zpci_err_insn(cc, *status, req, 0); cc 60 arch/s390/pci/pci_insn.c return cc; cc 68 arch/s390/pci/pci_insn.c u8 cc; cc 74 arch/s390/pci/pci_insn.c : [cc] "=d" (cc), [fn] "+d" (fn) cc 78 arch/s390/pci/pci_insn.c return cc; cc 83 arch/s390/pci/pci_insn.c u8 cc, status; cc 86 arch/s390/pci/pci_insn.c cc = __rpcit(fn, addr, range, &status); cc 87 arch/s390/pci/pci_insn.c if (cc == 2) cc 89 arch/s390/pci/pci_insn.c } while (cc == 2); cc 91 arch/s390/pci/pci_insn.c if (cc) cc 92 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, addr, range); cc 94 arch/s390/pci/pci_insn.c if (cc == 1 && (status == 4 || status == 16)) cc 97 arch/s390/pci/pci_insn.c return (cc) ? -EIO : 0; cc 118 arch/s390/pci/pci_insn.c int cc = -ENXIO; cc 127 arch/s390/pci/pci_insn.c : [cc] "+d" (cc), [data] "=d" (__data), [req] "+d" (__req) cc 132 arch/s390/pci/pci_insn.c return cc; cc 138 arch/s390/pci/pci_insn.c int cc; cc 140 arch/s390/pci/pci_insn.c cc = ____pcilg(&__data, req, offset, status); cc 141 arch/s390/pci/pci_insn.c if (!cc) cc 144 arch/s390/pci/pci_insn.c return cc; cc 150 arch/s390/pci/pci_insn.c int cc; cc 153 arch/s390/pci/pci_insn.c cc = __pcilg(data, req, offset, &status); cc 154 arch/s390/pci/pci_insn.c if (cc == 2) cc 156 arch/s390/pci/pci_insn.c } while (cc == 2); cc 158 arch/s390/pci/pci_insn.c if (cc) cc 159 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, req, offset); cc 161 arch/s390/pci/pci_insn.c return (cc > 0) ? -EIO : cc; cc 178 arch/s390/pci/pci_insn.c int cc = -ENXIO; cc 187 arch/s390/pci/pci_insn.c : [cc] "+d" (cc), [data] "=d" (__data), "+d" (r3) cc 192 arch/s390/pci/pci_insn.c return cc; cc 198 arch/s390/pci/pci_insn.c int cc; cc 203 arch/s390/pci/pci_insn.c cc = __pcilg_mio(data, (__force u64) addr, len, &status); cc 204 arch/s390/pci/pci_insn.c if (cc) cc 205 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, 0, (__force u64) addr); cc 207 arch/s390/pci/pci_insn.c return (cc > 0) ? -EIO : cc; cc 216 arch/s390/pci/pci_insn.c int cc = -ENXIO; cc 224 arch/s390/pci/pci_insn.c : [cc] "+d" (cc), [req] "+d" (__req) cc 228 arch/s390/pci/pci_insn.c return cc; cc 234 arch/s390/pci/pci_insn.c int cc; cc 237 arch/s390/pci/pci_insn.c cc = __pcistg(data, req, offset, &status); cc 238 arch/s390/pci/pci_insn.c if (cc == 2) cc 240 arch/s390/pci/pci_insn.c } while (cc == 2); cc 242 arch/s390/pci/pci_insn.c if (cc) cc 243 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, req, offset); cc 245 arch/s390/pci/pci_insn.c return (cc > 0) ? 
-EIO : cc; cc 262 arch/s390/pci/pci_insn.c int cc = -ENXIO; cc 270 arch/s390/pci/pci_insn.c : [cc] "+d" (cc), "+d" (r3) cc 274 arch/s390/pci/pci_insn.c return cc; cc 280 arch/s390/pci/pci_insn.c int cc; cc 285 arch/s390/pci/pci_insn.c cc = __pcistg_mio(data, (__force u64) addr, len, &status); cc 286 arch/s390/pci/pci_insn.c if (cc) cc 287 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, 0, (__force u64) addr); cc 289 arch/s390/pci/pci_insn.c return (cc > 0) ? -EIO : cc; cc 296 arch/s390/pci/pci_insn.c int cc = -ENXIO; cc 304 arch/s390/pci/pci_insn.c : [cc] "+d" (cc), [req] "+d" (req) cc 308 arch/s390/pci/pci_insn.c return cc; cc 314 arch/s390/pci/pci_insn.c int cc; cc 317 arch/s390/pci/pci_insn.c cc = __pcistb(data, req, offset, &status); cc 318 arch/s390/pci/pci_insn.c if (cc == 2) cc 320 arch/s390/pci/pci_insn.c } while (cc == 2); cc 322 arch/s390/pci/pci_insn.c if (cc) cc 323 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, req, offset); cc 325 arch/s390/pci/pci_insn.c return (cc > 0) ? -EIO : cc; cc 341 arch/s390/pci/pci_insn.c int cc = -ENXIO; cc 349 arch/s390/pci/pci_insn.c : [cc] "+d" (cc), [len] "+d" (len) cc 353 arch/s390/pci/pci_insn.c return cc; cc 360 arch/s390/pci/pci_insn.c int cc; cc 365 arch/s390/pci/pci_insn.c cc = __pcistb_mio(src, (__force u64) dst, len, &status); cc 366 arch/s390/pci/pci_insn.c if (cc) cc 367 arch/s390/pci/pci_insn.c zpci_err_insn(cc, status, 0, (__force u64) dst); cc 369 arch/s390/pci/pci_insn.c return (cc > 0) ? -EIO : cc; cc 61 arch/s390/pci/pci_irq.c u8 cc, status; cc 63 arch/s390/pci/pci_irq.c cc = zpci_mod_fc(req, &fib, &status); cc 64 arch/s390/pci/pci_irq.c if (cc == 3 || (cc == 1 && status == 24)) cc 66 arch/s390/pci/pci_irq.c cc = 0; cc 68 arch/s390/pci/pci_irq.c return cc ? -EIO : 0; cc 90 arch/s390/pci/pci_irq.c u8 cc, status; cc 93 arch/s390/pci/pci_irq.c cc = zpci_mod_fc(req, &fib, &status); cc 94 arch/s390/pci/pci_irq.c if (cc == 3 || (cc == 1 && status == 24)) cc 96 arch/s390/pci/pci_irq.c cc = 0; cc 98 arch/s390/pci/pci_irq.c return cc ? -EIO : 0; cc 17 arch/s390/pci/pci_mmio.c static inline void zpci_err_mmio(u8 cc, u8 status, u64 offset) cc 21 arch/s390/pci/pci_mmio.c u8 cc; cc 23 arch/s390/pci/pci_mmio.c } data = {offset, cc, status}; cc 32 arch/s390/pci/pci_mmio.c int cc = -ENXIO; cc 41 arch/s390/pci/pci_mmio.c : [cc] "+d" (cc), [len] "+d" (len) cc 45 arch/s390/pci/pci_mmio.c return cc; cc 54 arch/s390/pci/pci_mmio.c int cc = -ENXIO; cc 79 arch/s390/pci/pci_mmio.c [len] "+d" (len), [cc] "+d" (cc), cc 85 arch/s390/pci/pci_mmio.c if (!cc && cnt != 0) cc 86 arch/s390/pci/pci_mmio.c cc = -EFAULT; cc 88 arch/s390/pci/pci_mmio.c return cc; cc 205 arch/s390/pci/pci_mmio.c int cc = -ENXIO; cc 228 arch/s390/pci/pci_mmio.c [cc] "+d" (cc), [val] "=d" (val), [len] "+d" (len), cc 236 arch/s390/pci/pci_mmio.c if (!cc && cnt != 0) cc 237 arch/s390/pci/pci_mmio.c cc = -EFAULT; cc 240 arch/s390/pci/pci_mmio.c return cc; cc 18 arch/x86/include/asm/rmwcc.h #define __GEN_RMWcc(fullop, _var, cc, clobbers, ...) \ cc 21 arch/x86/include/asm/rmwcc.h asm_volatile_goto (fullop "; j" #cc " %l[cc_label]" \ cc 34 arch/x86/include/asm/rmwcc.h #define __GEN_RMWcc(fullop, _var, cc, clobbers, ...) 
\ cc 37 arch/x86/include/asm/rmwcc.h asm volatile (fullop CC_SET(cc) \ cc 38 arch/x86/include/asm/rmwcc.h : [var] "+m" (_var), CC_OUT(cc) (c) \ cc 45 arch/x86/include/asm/rmwcc.h #define GEN_UNARY_RMWcc_4(op, var, cc, arg0) \ cc 46 arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " " arg0, var, cc, __CLOBBERS_MEM()) cc 48 arch/x86/include/asm/rmwcc.h #define GEN_UNARY_RMWcc_3(op, var, cc) \ cc 49 arch/x86/include/asm/rmwcc.h GEN_UNARY_RMWcc_4(op, var, cc, "%[var]") cc 53 arch/x86/include/asm/rmwcc.h #define GEN_BINARY_RMWcc_6(op, var, cc, vcon, _val, arg0) \ cc 54 arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " %[val], " arg0, var, cc, \ cc 57 arch/x86/include/asm/rmwcc.h #define GEN_BINARY_RMWcc_5(op, var, cc, vcon, val) \ cc 58 arch/x86/include/asm/rmwcc.h GEN_BINARY_RMWcc_6(op, var, cc, vcon, val, "%[var]") cc 62 arch/x86/include/asm/rmwcc.h #define GEN_UNARY_SUFFIXED_RMWcc(op, suffix, var, cc, clobbers...) \ cc 63 arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " %[var]\n\t" suffix, var, cc, \ cc 66 arch/x86/include/asm/rmwcc.h #define GEN_BINARY_SUFFIXED_RMWcc(op, suffix, var, cc, vcon, _val, clobbers...)\ cc 67 arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " %[val], %[var]\n\t" suffix, var, cc, \ cc 154 arch/x86/kernel/cpu/centaur.c u32 aa, bb, cc, dd; cc 238 arch/x86/kernel/cpu/centaur.c cpuid(0x80000005, &aa, &bb, &cc, &dd); cc 240 arch/x86/kernel/cpu/centaur.c c->x86_cache_size = (cc>>24)+(dd>>24); cc 793 arch/x86/math-emu/fpu_trig.c int cc; cc 802 arch/x86/math-emu/fpu_trig.c cc = 0; cc 863 arch/x86/math-emu/fpu_trig.c cc |= SW_C0; cc 865 arch/x86/math-emu/fpu_trig.c cc |= SW_C3; cc 867 arch/x86/math-emu/fpu_trig.c cc |= SW_C1; cc 925 arch/x86/math-emu/fpu_trig.c cc = SW_C2; cc 937 arch/x86/math-emu/fpu_trig.c setcc(cc); cc 950 arch/x86/math-emu/fpu_trig.c setcc(cc); cc 52 arch/x86/math-emu/status_w.h static inline void setcc(int cc) cc 55 arch/x86/math-emu/status_w.h partial_status |= (cc) & (SW_C0 | SW_C1 | SW_C2 | SW_C3); cc 101 arch/xtensa/include/asm/coprocessor.h #define XCHAL_SA_REG(list,cc,abi,type,y,name,z,align,size,...) \ cc 102 arch/xtensa/include/asm/coprocessor.h __REG ## list (cc, abi, type, name, size, align) cc 104 arch/xtensa/include/asm/coprocessor.h #define __REG0(cc,abi,t,name,s,a) __REG0_ ## cc (abi,name) cc 105 arch/xtensa/include/asm/coprocessor.h #define __REG1(cc,abi,t,name,s,a) __REG1_ ## cc (name) cc 106 arch/xtensa/include/asm/coprocessor.h #define __REG2(cc,abi,type,...) 
__REG2_ ## type (__VA_ARGS__) cc 39 block/partitions/ibm.c cyl |= ptr->cc; cc 57 block/partitions/ibm.c cyl |= ptr->cc; cc 84 crypto/ecrdsa.c struct ecc_point cc = ECC_POINT_INIT(s, e, ndigits); /* reuse s, e */ cc 137 crypto/ecrdsa.c ecc_point_mult_shamir(&cc, z1, &ctx->curve->g, z2, &ctx->pub_key, cc 139 crypto/ecrdsa.c if (vli_cmp(cc.x, ctx->curve->n, ndigits) == 1) cc 140 crypto/ecrdsa.c vli_sub(cc.x, cc.x, ctx->curve->n, ndigits); cc 143 crypto/ecrdsa.c if (!vli_cmp(cc.x, r, ndigits)) cc 47 crypto/rmd128.c u32 aa, bb, cc, dd, aaa, bbb, ccc, ddd; cc 52 crypto/rmd128.c cc = state[2]; cc 62 crypto/rmd128.c ROUND(aa, bb, cc, dd, F1, K1, in[0], 11); cc 63 crypto/rmd128.c ROUND(dd, aa, bb, cc, F1, K1, in[1], 14); cc 64 crypto/rmd128.c ROUND(cc, dd, aa, bb, F1, K1, in[2], 15); cc 65 crypto/rmd128.c ROUND(bb, cc, dd, aa, F1, K1, in[3], 12); cc 66 crypto/rmd128.c ROUND(aa, bb, cc, dd, F1, K1, in[4], 5); cc 67 crypto/rmd128.c ROUND(dd, aa, bb, cc, F1, K1, in[5], 8); cc 68 crypto/rmd128.c ROUND(cc, dd, aa, bb, F1, K1, in[6], 7); cc 69 crypto/rmd128.c ROUND(bb, cc, dd, aa, F1, K1, in[7], 9); cc 70 crypto/rmd128.c ROUND(aa, bb, cc, dd, F1, K1, in[8], 11); cc 71 crypto/rmd128.c ROUND(dd, aa, bb, cc, F1, K1, in[9], 13); cc 72 crypto/rmd128.c ROUND(cc, dd, aa, bb, F1, K1, in[10], 14); cc 73 crypto/rmd128.c ROUND(bb, cc, dd, aa, F1, K1, in[11], 15); cc 74 crypto/rmd128.c ROUND(aa, bb, cc, dd, F1, K1, in[12], 6); cc 75 crypto/rmd128.c ROUND(dd, aa, bb, cc, F1, K1, in[13], 7); cc 76 crypto/rmd128.c ROUND(cc, dd, aa, bb, F1, K1, in[14], 9); cc 77 crypto/rmd128.c ROUND(bb, cc, dd, aa, F1, K1, in[15], 8); cc 80 crypto/rmd128.c ROUND(aa, bb, cc, dd, F2, K2, in[7], 7); cc 81 crypto/rmd128.c ROUND(dd, aa, bb, cc, F2, K2, in[4], 6); cc 82 crypto/rmd128.c ROUND(cc, dd, aa, bb, F2, K2, in[13], 8); cc 83 crypto/rmd128.c ROUND(bb, cc, dd, aa, F2, K2, in[1], 13); cc 84 crypto/rmd128.c ROUND(aa, bb, cc, dd, F2, K2, in[10], 11); cc 85 crypto/rmd128.c ROUND(dd, aa, bb, cc, F2, K2, in[6], 9); cc 86 crypto/rmd128.c ROUND(cc, dd, aa, bb, F2, K2, in[15], 7); cc 87 crypto/rmd128.c ROUND(bb, cc, dd, aa, F2, K2, in[3], 15); cc 88 crypto/rmd128.c ROUND(aa, bb, cc, dd, F2, K2, in[12], 7); cc 89 crypto/rmd128.c ROUND(dd, aa, bb, cc, F2, K2, in[0], 12); cc 90 crypto/rmd128.c ROUND(cc, dd, aa, bb, F2, K2, in[9], 15); cc 91 crypto/rmd128.c ROUND(bb, cc, dd, aa, F2, K2, in[5], 9); cc 92 crypto/rmd128.c ROUND(aa, bb, cc, dd, F2, K2, in[2], 11); cc 93 crypto/rmd128.c ROUND(dd, aa, bb, cc, F2, K2, in[14], 7); cc 94 crypto/rmd128.c ROUND(cc, dd, aa, bb, F2, K2, in[11], 13); cc 95 crypto/rmd128.c ROUND(bb, cc, dd, aa, F2, K2, in[8], 12); cc 98 crypto/rmd128.c ROUND(aa, bb, cc, dd, F3, K3, in[3], 11); cc 99 crypto/rmd128.c ROUND(dd, aa, bb, cc, F3, K3, in[10], 13); cc 100 crypto/rmd128.c ROUND(cc, dd, aa, bb, F3, K3, in[14], 6); cc 101 crypto/rmd128.c ROUND(bb, cc, dd, aa, F3, K3, in[4], 7); cc 102 crypto/rmd128.c ROUND(aa, bb, cc, dd, F3, K3, in[9], 14); cc 103 crypto/rmd128.c ROUND(dd, aa, bb, cc, F3, K3, in[15], 9); cc 104 crypto/rmd128.c ROUND(cc, dd, aa, bb, F3, K3, in[8], 13); cc 105 crypto/rmd128.c ROUND(bb, cc, dd, aa, F3, K3, in[1], 15); cc 106 crypto/rmd128.c ROUND(aa, bb, cc, dd, F3, K3, in[2], 14); cc 107 crypto/rmd128.c ROUND(dd, aa, bb, cc, F3, K3, in[7], 8); cc 108 crypto/rmd128.c ROUND(cc, dd, aa, bb, F3, K3, in[0], 13); cc 109 crypto/rmd128.c ROUND(bb, cc, dd, aa, F3, K3, in[6], 6); cc 110 crypto/rmd128.c ROUND(aa, bb, cc, dd, F3, K3, in[13], 5); cc 111 crypto/rmd128.c ROUND(dd, aa, bb, cc, F3, K3, in[11], 12); cc 112 
crypto/rmd128.c ROUND(cc, dd, aa, bb, F3, K3, in[5], 7); cc 113 crypto/rmd128.c ROUND(bb, cc, dd, aa, F3, K3, in[12], 5); cc 116 crypto/rmd128.c ROUND(aa, bb, cc, dd, F4, K4, in[1], 11); cc 117 crypto/rmd128.c ROUND(dd, aa, bb, cc, F4, K4, in[9], 12); cc 118 crypto/rmd128.c ROUND(cc, dd, aa, bb, F4, K4, in[11], 14); cc 119 crypto/rmd128.c ROUND(bb, cc, dd, aa, F4, K4, in[10], 15); cc 120 crypto/rmd128.c ROUND(aa, bb, cc, dd, F4, K4, in[0], 14); cc 121 crypto/rmd128.c ROUND(dd, aa, bb, cc, F4, K4, in[8], 15); cc 122 crypto/rmd128.c ROUND(cc, dd, aa, bb, F4, K4, in[12], 9); cc 123 crypto/rmd128.c ROUND(bb, cc, dd, aa, F4, K4, in[4], 8); cc 124 crypto/rmd128.c ROUND(aa, bb, cc, dd, F4, K4, in[13], 9); cc 125 crypto/rmd128.c ROUND(dd, aa, bb, cc, F4, K4, in[3], 14); cc 126 crypto/rmd128.c ROUND(cc, dd, aa, bb, F4, K4, in[7], 5); cc 127 crypto/rmd128.c ROUND(bb, cc, dd, aa, F4, K4, in[15], 6); cc 128 crypto/rmd128.c ROUND(aa, bb, cc, dd, F4, K4, in[14], 8); cc 129 crypto/rmd128.c ROUND(dd, aa, bb, cc, F4, K4, in[5], 6); cc 130 crypto/rmd128.c ROUND(cc, dd, aa, bb, F4, K4, in[6], 5); cc 131 crypto/rmd128.c ROUND(bb, cc, dd, aa, F4, K4, in[2], 12); cc 206 crypto/rmd128.c ddd += cc + state[1]; /* final result for state[0] */ cc 51 crypto/rmd160.c u32 aa, bb, cc, dd, ee, aaa, bbb, ccc, ddd, eee; cc 56 crypto/rmd160.c cc = state[2]; cc 68 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); cc 69 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); cc 70 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); cc 71 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); cc 72 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); cc 73 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); cc 74 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); cc 75 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); cc 76 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F1, K1, in[8], 11); cc 77 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F1, K1, in[9], 13); cc 78 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[10], 14); cc 79 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F1, K1, in[11], 15); cc 80 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F1, K1, in[12], 6); cc 81 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F1, K1, in[13], 7); cc 82 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F1, K1, in[14], 9); cc 83 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[15], 8); cc 86 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7); cc 87 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6); cc 88 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8); cc 89 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13); cc 90 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11); cc 91 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9); cc 92 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7); cc 93 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15); cc 94 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7); cc 95 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F2, K2, in[0], 12); cc 96 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[9], 15); cc 97 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F2, K2, in[5], 9); cc 98 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F2, K2, in[2], 11); cc 99 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F2, K2, in[14], 7); cc 100 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F2, K2, in[11], 13); cc 101 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[8], 12); cc 104 crypto/rmd160.c ROUND(dd, 
ee, aa, bb, cc, F3, K3, in[3], 11); cc 105 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F3, K3, in[10], 13); cc 106 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F3, K3, in[14], 6); cc 107 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F3, K3, in[4], 7); cc 108 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F3, K3, in[9], 14); cc 109 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[15], 9); cc 110 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F3, K3, in[8], 13); cc 111 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F3, K3, in[1], 15); cc 112 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F3, K3, in[2], 14); cc 113 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F3, K3, in[7], 8); cc 114 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[0], 13); cc 115 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F3, K3, in[6], 6); cc 116 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F3, K3, in[13], 5); cc 117 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F3, K3, in[11], 12); cc 118 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F3, K3, in[5], 7); cc 119 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[12], 5); cc 122 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[1], 11); cc 123 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F4, K4, in[9], 12); cc 124 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F4, K4, in[11], 14); cc 125 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F4, K4, in[10], 15); cc 126 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F4, K4, in[0], 14); cc 127 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[8], 15); cc 128 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F4, K4, in[12], 9); cc 129 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F4, K4, in[4], 8); cc 130 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F4, K4, in[13], 9); cc 131 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F4, K4, in[3], 14); cc 132 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[7], 5); cc 133 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F4, K4, in[15], 6); cc 134 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F4, K4, in[14], 8); cc 135 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F4, K4, in[5], 6); cc 136 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F4, K4, in[6], 5); cc 137 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[2], 12); cc 140 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[4], 9); cc 141 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F5, K5, in[0], 15); cc 142 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F5, K5, in[5], 5); cc 143 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F5, K5, in[9], 11); cc 144 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F5, K5, in[7], 6); cc 145 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[12], 8); cc 146 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F5, K5, in[2], 13); cc 147 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F5, K5, in[10], 12); cc 148 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F5, K5, in[14], 5); cc 149 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F5, K5, in[1], 12); cc 150 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[3], 13); cc 151 crypto/rmd160.c ROUND(aa, bb, cc, dd, ee, F5, K5, in[8], 14); cc 152 crypto/rmd160.c ROUND(ee, aa, bb, cc, dd, F5, K5, in[11], 11); cc 153 crypto/rmd160.c ROUND(dd, ee, aa, bb, cc, F5, K5, in[6], 8); cc 154 crypto/rmd160.c ROUND(cc, dd, ee, aa, bb, F5, K5, in[15], 5); cc 155 crypto/rmd160.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[13], 6); cc 248 crypto/rmd160.c ddd += cc + state[1]; /* final result for state[0] */ cc 47 crypto/rmd256.c u32 aa, bb, cc, dd, aaa, bbb, ccc, ddd; cc 52 crypto/rmd256.c cc = state[2]; cc 62 crypto/rmd256.c ROUND(aa, bb, cc, dd, F1, K1, in[0], 11); cc 63 crypto/rmd256.c ROUND(dd, aa, bb, 
cc, F1, K1, in[1], 14); cc 64 crypto/rmd256.c ROUND(cc, dd, aa, bb, F1, K1, in[2], 15); cc 65 crypto/rmd256.c ROUND(bb, cc, dd, aa, F1, K1, in[3], 12); cc 66 crypto/rmd256.c ROUND(aa, bb, cc, dd, F1, K1, in[4], 5); cc 67 crypto/rmd256.c ROUND(dd, aa, bb, cc, F1, K1, in[5], 8); cc 68 crypto/rmd256.c ROUND(cc, dd, aa, bb, F1, K1, in[6], 7); cc 69 crypto/rmd256.c ROUND(bb, cc, dd, aa, F1, K1, in[7], 9); cc 70 crypto/rmd256.c ROUND(aa, bb, cc, dd, F1, K1, in[8], 11); cc 71 crypto/rmd256.c ROUND(dd, aa, bb, cc, F1, K1, in[9], 13); cc 72 crypto/rmd256.c ROUND(cc, dd, aa, bb, F1, K1, in[10], 14); cc 73 crypto/rmd256.c ROUND(bb, cc, dd, aa, F1, K1, in[11], 15); cc 74 crypto/rmd256.c ROUND(aa, bb, cc, dd, F1, K1, in[12], 6); cc 75 crypto/rmd256.c ROUND(dd, aa, bb, cc, F1, K1, in[13], 7); cc 76 crypto/rmd256.c ROUND(cc, dd, aa, bb, F1, K1, in[14], 9); cc 77 crypto/rmd256.c ROUND(bb, cc, dd, aa, F1, K1, in[15], 8); cc 101 crypto/rmd256.c ROUND(aa, bb, cc, dd, F2, K2, in[7], 7); cc 102 crypto/rmd256.c ROUND(dd, aa, bb, cc, F2, K2, in[4], 6); cc 103 crypto/rmd256.c ROUND(cc, dd, aa, bb, F2, K2, in[13], 8); cc 104 crypto/rmd256.c ROUND(bb, cc, dd, aa, F2, K2, in[1], 13); cc 105 crypto/rmd256.c ROUND(aa, bb, cc, dd, F2, K2, in[10], 11); cc 106 crypto/rmd256.c ROUND(dd, aa, bb, cc, F2, K2, in[6], 9); cc 107 crypto/rmd256.c ROUND(cc, dd, aa, bb, F2, K2, in[15], 7); cc 108 crypto/rmd256.c ROUND(bb, cc, dd, aa, F2, K2, in[3], 15); cc 109 crypto/rmd256.c ROUND(aa, bb, cc, dd, F2, K2, in[12], 7); cc 110 crypto/rmd256.c ROUND(dd, aa, bb, cc, F2, K2, in[0], 12); cc 111 crypto/rmd256.c ROUND(cc, dd, aa, bb, F2, K2, in[9], 15); cc 112 crypto/rmd256.c ROUND(bb, cc, dd, aa, F2, K2, in[5], 9); cc 113 crypto/rmd256.c ROUND(aa, bb, cc, dd, F2, K2, in[2], 11); cc 114 crypto/rmd256.c ROUND(dd, aa, bb, cc, F2, K2, in[14], 7); cc 115 crypto/rmd256.c ROUND(cc, dd, aa, bb, F2, K2, in[11], 13); cc 116 crypto/rmd256.c ROUND(bb, cc, dd, aa, F2, K2, in[8], 12); cc 140 crypto/rmd256.c ROUND(aa, bb, cc, dd, F3, K3, in[3], 11); cc 141 crypto/rmd256.c ROUND(dd, aa, bb, cc, F3, K3, in[10], 13); cc 142 crypto/rmd256.c ROUND(cc, dd, aa, bb, F3, K3, in[14], 6); cc 143 crypto/rmd256.c ROUND(bb, cc, dd, aa, F3, K3, in[4], 7); cc 144 crypto/rmd256.c ROUND(aa, bb, cc, dd, F3, K3, in[9], 14); cc 145 crypto/rmd256.c ROUND(dd, aa, bb, cc, F3, K3, in[15], 9); cc 146 crypto/rmd256.c ROUND(cc, dd, aa, bb, F3, K3, in[8], 13); cc 147 crypto/rmd256.c ROUND(bb, cc, dd, aa, F3, K3, in[1], 15); cc 148 crypto/rmd256.c ROUND(aa, bb, cc, dd, F3, K3, in[2], 14); cc 149 crypto/rmd256.c ROUND(dd, aa, bb, cc, F3, K3, in[7], 8); cc 150 crypto/rmd256.c ROUND(cc, dd, aa, bb, F3, K3, in[0], 13); cc 151 crypto/rmd256.c ROUND(bb, cc, dd, aa, F3, K3, in[6], 6); cc 152 crypto/rmd256.c ROUND(aa, bb, cc, dd, F3, K3, in[13], 5); cc 153 crypto/rmd256.c ROUND(dd, aa, bb, cc, F3, K3, in[11], 12); cc 154 crypto/rmd256.c ROUND(cc, dd, aa, bb, F3, K3, in[5], 7); cc 155 crypto/rmd256.c ROUND(bb, cc, dd, aa, F3, K3, in[12], 5); cc 176 crypto/rmd256.c swap(cc, ccc); cc 179 crypto/rmd256.c ROUND(aa, bb, cc, dd, F4, K4, in[1], 11); cc 180 crypto/rmd256.c ROUND(dd, aa, bb, cc, F4, K4, in[9], 12); cc 181 crypto/rmd256.c ROUND(cc, dd, aa, bb, F4, K4, in[11], 14); cc 182 crypto/rmd256.c ROUND(bb, cc, dd, aa, F4, K4, in[10], 15); cc 183 crypto/rmd256.c ROUND(aa, bb, cc, dd, F4, K4, in[0], 14); cc 184 crypto/rmd256.c ROUND(dd, aa, bb, cc, F4, K4, in[8], 15); cc 185 crypto/rmd256.c ROUND(cc, dd, aa, bb, F4, K4, in[12], 9); cc 186 crypto/rmd256.c ROUND(bb, cc, dd, aa, F4, K4, in[4], 8); 
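
The crypto/rmd128.c, crypto/rmd160.c and crypto/rmd256.c entries around this point all use cc as one of the plain u32 working variables (aa, bb, cc, dd, ...) whose positions rotate from one ROUND() call to the next, so each variable takes its turn as the word being updated. Below is a minimal standalone sketch of that rotation pattern, not the kernel's code: the ROUND macro, F1 and K1 here follow the public RIPEMD-128 round-1 definitions (f = x ^ y ^ z, k = 0) and skip details such as endianness handling of the message words.

/*
 * Illustrative sketch of the RIPEMD working-variable rotation seen in
 * the crypto/rmd*.c entries: ROUND() rewrites its first argument, and
 * the argument order shifts one step per call, so `cc` is the updated
 * word every fourth step.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint32_t u32;

static inline u32 rol32(u32 v, unsigned int s)
{
	return (v << s) | (v >> (32 - s));
}

#define F1(x, y, z)	((x) ^ (y) ^ (z))
#define K1		0x00000000U

#define ROUND(a, b, c, d, f, k, x, s) \
	((a) = rol32((a) + f((b), (c), (d)) + (x) + (k), (s)))

int main(void)
{
	u32 state[4] = { 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476 };
	u32 in[16] = { 0 };	/* one all-zero 64-byte message block */
	u32 aa = state[0], bb = state[1], cc = state[2], dd = state[3];

	/* First four round-1 steps, same argument rotation as the listing:
	 * aa, then dd, then cc, then bb is the word that gets updated. */
	ROUND(aa, bb, cc, dd, F1, K1, in[0], 11);
	ROUND(dd, aa, bb, cc, F1, K1, in[1], 14);
	ROUND(cc, dd, aa, bb, F1, K1, in[2], 15);
	ROUND(bb, cc, dd, aa, F1, K1, in[3], 12);

	printf("aa=%08x bb=%08x cc=%08x dd=%08x\n", aa, bb, cc, dd);
	return 0;
}
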
cc 187 crypto/rmd256.c ROUND(aa, bb, cc, dd, F4, K4, in[13], 9); cc 188 crypto/rmd256.c ROUND(dd, aa, bb, cc, F4, K4, in[3], 14); cc 189 crypto/rmd256.c ROUND(cc, dd, aa, bb, F4, K4, in[7], 5); cc 190 crypto/rmd256.c ROUND(bb, cc, dd, aa, F4, K4, in[15], 6); cc 191 crypto/rmd256.c ROUND(aa, bb, cc, dd, F4, K4, in[14], 8); cc 192 crypto/rmd256.c ROUND(dd, aa, bb, cc, F4, K4, in[5], 6); cc 193 crypto/rmd256.c ROUND(cc, dd, aa, bb, F4, K4, in[6], 5); cc 194 crypto/rmd256.c ROUND(bb, cc, dd, aa, F4, K4, in[2], 12); cc 220 crypto/rmd256.c state[2] += cc; cc 51 crypto/rmd320.c u32 aa, bb, cc, dd, ee, aaa, bbb, ccc, ddd, eee; cc 56 crypto/rmd320.c cc = state[2]; cc 68 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[0], 11); cc 69 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F1, K1, in[1], 14); cc 70 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F1, K1, in[2], 15); cc 71 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F1, K1, in[3], 12); cc 72 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F1, K1, in[4], 5); cc 73 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[5], 8); cc 74 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F1, K1, in[6], 7); cc 75 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F1, K1, in[7], 9); cc 76 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F1, K1, in[8], 11); cc 77 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F1, K1, in[9], 13); cc 78 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[10], 14); cc 79 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F1, K1, in[11], 15); cc 80 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F1, K1, in[12], 6); cc 81 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F1, K1, in[13], 7); cc 82 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F1, K1, in[14], 9); cc 83 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F1, K1, in[15], 8); cc 107 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[7], 7); cc 108 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F2, K2, in[4], 6); cc 109 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F2, K2, in[13], 8); cc 110 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F2, K2, in[1], 13); cc 111 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11); cc 112 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[6], 9); cc 113 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F2, K2, in[15], 7); cc 114 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F2, K2, in[3], 15); cc 115 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F2, K2, in[12], 7); cc 116 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F2, K2, in[0], 12); cc 117 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[9], 15); cc 118 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F2, K2, in[5], 9); cc 119 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F2, K2, in[2], 11); cc 120 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F2, K2, in[14], 7); cc 121 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F2, K2, in[11], 13); cc 122 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F2, K2, in[8], 12); cc 146 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[3], 11); cc 147 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F3, K3, in[10], 13); cc 148 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F3, K3, in[14], 6); cc 149 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F3, K3, in[4], 7); cc 150 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F3, K3, in[9], 14); cc 151 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[15], 9); cc 152 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F3, K3, in[8], 13); cc 153 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F3, K3, in[1], 15); cc 154 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F3, K3, in[2], 14); cc 155 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F3, K3, in[7], 8); cc 156 
crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[0], 13); cc 157 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F3, K3, in[6], 6); cc 158 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F3, K3, in[13], 5); cc 159 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F3, K3, in[11], 12); cc 160 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F3, K3, in[5], 7); cc 161 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F3, K3, in[12], 5); cc 182 crypto/rmd320.c swap(cc, ccc); cc 185 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[1], 11); cc 186 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F4, K4, in[9], 12); cc 187 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F4, K4, in[11], 14); cc 188 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F4, K4, in[10], 15); cc 189 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F4, K4, in[0], 14); cc 190 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[8], 15); cc 191 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F4, K4, in[12], 9); cc 192 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F4, K4, in[4], 8); cc 193 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F4, K4, in[13], 9); cc 194 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F4, K4, in[3], 14); cc 195 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[7], 5); cc 196 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F4, K4, in[15], 6); cc 197 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F4, K4, in[14], 8); cc 198 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F4, K4, in[5], 6); cc 199 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F4, K4, in[6], 5); cc 200 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F4, K4, in[2], 12); cc 224 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[4], 9); cc 225 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F5, K5, in[0], 15); cc 226 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F5, K5, in[5], 5); cc 227 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F5, K5, in[9], 11); cc 228 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F5, K5, in[7], 6); cc 229 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[12], 8); cc 230 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F5, K5, in[2], 13); cc 231 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F5, K5, in[10], 12); cc 232 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F5, K5, in[14], 5); cc 233 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F5, K5, in[1], 12); cc 234 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[3], 13); cc 235 crypto/rmd320.c ROUND(aa, bb, cc, dd, ee, F5, K5, in[8], 14); cc 236 crypto/rmd320.c ROUND(ee, aa, bb, cc, dd, F5, K5, in[11], 11); cc 237 crypto/rmd320.c ROUND(dd, ee, aa, bb, cc, F5, K5, in[6], 8); cc 238 crypto/rmd320.c ROUND(cc, dd, ee, aa, bb, F5, K5, in[15], 5); cc 239 crypto/rmd320.c ROUND(bb, cc, dd, ee, aa, F5, K5, in[13], 6); cc 265 crypto/rmd320.c state[2] += cc; cc 464 crypto/tgr192.c u64 a, b, c, aa, bb, cc; cc 474 crypto/tgr192.c c = cc = tctx->c; cc 486 crypto/tgr192.c c += cc; cc 166 drivers/ata/acard-ahci.c u16 cc; cc 169 drivers/ata/acard-ahci.c pci_read_config_word(pdev, 0x0a, &cc); cc 170 drivers/ata/acard-ahci.c if (cc == PCI_CLASS_STORAGE_IDE) cc 172 drivers/ata/acard-ahci.c else if (cc == PCI_CLASS_STORAGE_SATA) cc 174 drivers/ata/acard-ahci.c else if (cc == PCI_CLASS_STORAGE_RAID) cc 917 drivers/ata/ahci.c u16 cc; cc 920 drivers/ata/ahci.c pci_read_config_word(pdev, 0x0a, &cc); cc 921 drivers/ata/ahci.c if (cc == PCI_CLASS_STORAGE_IDE) cc 923 drivers/ata/ahci.c else if (cc == PCI_CLASS_STORAGE_SATA) cc 925 drivers/ata/ahci.c else if (cc == PCI_CLASS_STORAGE_RAID) cc 45 drivers/bcma/bcma_private.h void bcma_core_chipcommon_early_init(struct bcma_drv_cc *cc); cc 46 drivers/bcma/bcma_private.h void 
bcma_core_chipcommon_init(struct bcma_drv_cc *cc); cc 47 drivers/bcma/bcma_private.h void bcma_chipco_bcm4331_ext_pa_lines_ctl(struct bcma_drv_cc *cc, bool enable); cc 49 drivers/bcma/bcma_private.h void bcma_chipco_serial_init(struct bcma_drv_cc *cc); cc 57 drivers/bcma/bcma_private.h void bcma_pmu_early_init(struct bcma_drv_cc *cc); cc 58 drivers/bcma/bcma_private.h void bcma_pmu_init(struct bcma_drv_cc *cc); cc 59 drivers/bcma/bcma_private.h u32 bcma_pmu_get_alp_clock(struct bcma_drv_cc *cc); cc 60 drivers/bcma/bcma_private.h u32 bcma_pmu_get_cpu_clock(struct bcma_drv_cc *cc); cc 68 drivers/bcma/bcma_private.h int bcma_pflash_init(struct bcma_drv_cc *cc); cc 70 drivers/bcma/bcma_private.h static inline int bcma_pflash_init(struct bcma_drv_cc *cc) cc 72 drivers/bcma/bcma_private.h bcma_err(cc->core->bus, "Parallel flash not supported\n"); cc 79 drivers/bcma/bcma_private.h int bcma_sflash_init(struct bcma_drv_cc *cc); cc 82 drivers/bcma/bcma_private.h static inline int bcma_sflash_init(struct bcma_drv_cc *cc) cc 84 drivers/bcma/bcma_private.h bcma_err(cc->core->bus, "Serial flash not supported\n"); cc 91 drivers/bcma/bcma_private.h int bcma_nflash_init(struct bcma_drv_cc *cc); cc 94 drivers/bcma/bcma_private.h static inline int bcma_nflash_init(struct bcma_drv_cc *cc) cc 96 drivers/bcma/bcma_private.h bcma_err(cc->core->bus, "NAND flash not supported\n"); cc 152 drivers/bcma/bcma_private.h extern int bcma_chipco_watchdog_register(struct bcma_drv_cc *cc); cc 202 drivers/bcma/bcma_private.h int bcma_gpio_init(struct bcma_drv_cc *cc); cc 203 drivers/bcma/bcma_private.h int bcma_gpio_unregister(struct bcma_drv_cc *cc); cc 205 drivers/bcma/bcma_private.h static inline int bcma_gpio_init(struct bcma_drv_cc *cc) cc 209 drivers/bcma/bcma_private.h static inline int bcma_gpio_unregister(struct bcma_drv_cc *cc) cc 18 drivers/bcma/driver_chipcommon.c static inline u32 bcma_cc_write32_masked(struct bcma_drv_cc *cc, u16 offset, cc 22 drivers/bcma/driver_chipcommon.c value |= bcma_cc_read32(cc, offset) & ~mask; cc 23 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, offset, value); cc 28 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_get_alp_clock(struct bcma_drv_cc *cc) cc 30 drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_PMU) cc 31 drivers/bcma/driver_chipcommon.c return bcma_pmu_get_alp_clock(cc); cc 37 drivers/bcma/driver_chipcommon.c static bool bcma_core_cc_has_pmu_watchdog(struct bcma_drv_cc *cc) cc 39 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 41 drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_PMU) { cc 56 drivers/bcma/driver_chipcommon.c static u32 bcma_chipco_watchdog_get_max_timer(struct bcma_drv_cc *cc) cc 58 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 61 drivers/bcma/driver_chipcommon.c if (bcma_core_cc_has_pmu_watchdog(cc)) { cc 64 drivers/bcma/driver_chipcommon.c else if (cc->core->id.rev < 26) cc 67 drivers/bcma/driver_chipcommon.c nb = (cc->core->id.rev >= 37) ? 
32 : 24; cc 80 drivers/bcma/driver_chipcommon.c struct bcma_drv_cc *cc = bcm47xx_wdt_get_drvdata(wdt); cc 82 drivers/bcma/driver_chipcommon.c return bcma_chipco_watchdog_timer_set(cc, ticks); cc 88 drivers/bcma/driver_chipcommon.c struct bcma_drv_cc *cc = bcm47xx_wdt_get_drvdata(wdt); cc 91 drivers/bcma/driver_chipcommon.c ticks = bcma_chipco_watchdog_timer_set(cc, cc->ticks_per_ms * ms); cc 92 drivers/bcma/driver_chipcommon.c return ticks / cc->ticks_per_ms; cc 95 drivers/bcma/driver_chipcommon.c static int bcma_chipco_watchdog_ticks_per_ms(struct bcma_drv_cc *cc) cc 97 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 99 drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_PMU) { cc 104 drivers/bcma/driver_chipcommon.c return bcma_chipco_get_alp_clock(cc) / 4000; cc 109 drivers/bcma/driver_chipcommon.c return bcma_chipco_get_alp_clock(cc) / 1000; cc 113 drivers/bcma/driver_chipcommon.c int bcma_chipco_watchdog_register(struct bcma_drv_cc *cc) cc 115 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 125 drivers/bcma/driver_chipcommon.c wdt.driver_data = cc; cc 129 drivers/bcma/driver_chipcommon.c bcma_chipco_watchdog_get_max_timer(cc) / cc->ticks_per_ms; cc 137 drivers/bcma/driver_chipcommon.c cc->watchdog = pdev; cc 142 drivers/bcma/driver_chipcommon.c static void bcma_core_chipcommon_flash_detect(struct bcma_drv_cc *cc) cc 144 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 146 drivers/bcma/driver_chipcommon.c switch (cc->capabilities & BCMA_CC_CAP_FLASHT) { cc 150 drivers/bcma/driver_chipcommon.c bcma_sflash_init(cc); cc 154 drivers/bcma/driver_chipcommon.c bcma_pflash_init(cc); cc 160 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev == 38 || cc 162 drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_NFLASH) { cc 164 drivers/bcma/driver_chipcommon.c bcma_nflash_init(cc); cc 169 drivers/bcma/driver_chipcommon.c void bcma_core_chipcommon_early_init(struct bcma_drv_cc *cc) cc 171 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 173 drivers/bcma/driver_chipcommon.c if (cc->early_setup_done) cc 176 drivers/bcma/driver_chipcommon.c spin_lock_init(&cc->gpio_lock); cc 178 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev >= 11) cc 179 drivers/bcma/driver_chipcommon.c cc->status = bcma_cc_read32(cc, BCMA_CC_CHIPSTAT); cc 180 drivers/bcma/driver_chipcommon.c cc->capabilities = bcma_cc_read32(cc, BCMA_CC_CAP); cc 181 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev >= 35) cc 182 drivers/bcma/driver_chipcommon.c cc->capabilities_ext = bcma_cc_read32(cc, BCMA_CC_CAP_EXT); cc 184 drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_PMU) cc 185 drivers/bcma/driver_chipcommon.c bcma_pmu_early_init(cc); cc 188 drivers/bcma/driver_chipcommon.c bcma_core_chipcommon_flash_detect(cc); cc 190 drivers/bcma/driver_chipcommon.c cc->early_setup_done = true; cc 193 drivers/bcma/driver_chipcommon.c void bcma_core_chipcommon_init(struct bcma_drv_cc *cc) cc 198 drivers/bcma/driver_chipcommon.c if (cc->setup_done) cc 201 drivers/bcma/driver_chipcommon.c bcma_core_chipcommon_early_init(cc); cc 203 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev >= 20) { cc 206 drivers/bcma/driver_chipcommon.c if (cc->core->bus->chipinfo.id == BCMA_CHIP_ID_BCM43142) { cc 211 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_GPIOPULLUP, pullup); cc 212 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_GPIOPULLDOWN, pulldown); cc 215 
drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_PMU) cc 216 drivers/bcma/driver_chipcommon.c bcma_pmu_init(cc); cc 217 drivers/bcma/driver_chipcommon.c if (cc->capabilities & BCMA_CC_CAP_PCTL) cc 218 drivers/bcma/driver_chipcommon.c bcma_err(cc->core->bus, "Power control not implemented!\n"); cc 220 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev >= 16) { cc 221 drivers/bcma/driver_chipcommon.c if (cc->core->bus->sprom.leddc_on_time && cc 222 drivers/bcma/driver_chipcommon.c cc->core->bus->sprom.leddc_off_time) { cc 223 drivers/bcma/driver_chipcommon.c leddc_on = cc->core->bus->sprom.leddc_on_time; cc 224 drivers/bcma/driver_chipcommon.c leddc_off = cc->core->bus->sprom.leddc_off_time; cc 226 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_GPIOTIMER, cc 230 drivers/bcma/driver_chipcommon.c cc->ticks_per_ms = bcma_chipco_watchdog_ticks_per_ms(cc); cc 232 drivers/bcma/driver_chipcommon.c cc->setup_done = true; cc 236 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_watchdog_timer_set(struct bcma_drv_cc *cc, u32 ticks) cc 240 drivers/bcma/driver_chipcommon.c maxt = bcma_chipco_watchdog_get_max_timer(cc); cc 241 drivers/bcma/driver_chipcommon.c if (bcma_core_cc_has_pmu_watchdog(cc)) { cc 246 drivers/bcma/driver_chipcommon.c bcma_pmu_write32(cc, BCMA_CC_PMU_WATCHDOG, ticks); cc 248 drivers/bcma/driver_chipcommon.c struct bcma_bus *bus = cc->core->bus; cc 253 drivers/bcma/driver_chipcommon.c bcma_core_set_clockmode(cc->core, cc 259 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_WATCHDOG, ticks); cc 264 drivers/bcma/driver_chipcommon.c void bcma_chipco_irq_mask(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 266 drivers/bcma/driver_chipcommon.c bcma_cc_write32_masked(cc, BCMA_CC_IRQMASK, mask, value); cc 269 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_irq_status(struct bcma_drv_cc *cc, u32 mask) cc 271 drivers/bcma/driver_chipcommon.c return bcma_cc_read32(cc, BCMA_CC_IRQSTAT) & mask; cc 274 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_in(struct bcma_drv_cc *cc, u32 mask) cc 276 drivers/bcma/driver_chipcommon.c return bcma_cc_read32(cc, BCMA_CC_GPIOIN) & mask; cc 279 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_out(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 284 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 285 drivers/bcma/driver_chipcommon.c res = bcma_cc_write32_masked(cc, BCMA_CC_GPIOOUT, mask, value); cc 286 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 292 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_outen(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 297 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 298 drivers/bcma/driver_chipcommon.c res = bcma_cc_write32_masked(cc, BCMA_CC_GPIOOUTEN, mask, value); cc 299 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 309 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_control(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 314 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 315 drivers/bcma/driver_chipcommon.c res = bcma_cc_write32_masked(cc, BCMA_CC_GPIOCTL, mask, value); cc 316 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 322 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_intmask(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 327 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 328 drivers/bcma/driver_chipcommon.c res 
= bcma_cc_write32_masked(cc, BCMA_CC_GPIOIRQ, mask, value); cc 329 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 334 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_polarity(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 339 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 340 drivers/bcma/driver_chipcommon.c res = bcma_cc_write32_masked(cc, BCMA_CC_GPIOPOL, mask, value); cc 341 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 346 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_pullup(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 351 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev < 20) cc 354 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 355 drivers/bcma/driver_chipcommon.c res = bcma_cc_write32_masked(cc, BCMA_CC_GPIOPULLUP, mask, value); cc 356 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 361 drivers/bcma/driver_chipcommon.c u32 bcma_chipco_gpio_pulldown(struct bcma_drv_cc *cc, u32 mask, u32 value) cc 366 drivers/bcma/driver_chipcommon.c if (cc->core->id.rev < 20) cc 369 drivers/bcma/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 370 drivers/bcma/driver_chipcommon.c res = bcma_cc_write32_masked(cc, BCMA_CC_GPIOPULLDOWN, mask, value); cc 371 drivers/bcma/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 377 drivers/bcma/driver_chipcommon.c void bcma_chipco_serial_init(struct bcma_drv_cc *cc) cc 382 drivers/bcma/driver_chipcommon.c unsigned int ccrev = cc->core->id.rev; cc 383 drivers/bcma/driver_chipcommon.c struct bcma_serial_port *ports = cc->serial_ports; cc 386 drivers/bcma/driver_chipcommon.c baud_base = bcma_chipco_get_alp_clock(cc); cc 389 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_CORECTL, cc 390 drivers/bcma/driver_chipcommon.c bcma_cc_read32(cc, BCMA_CC_CORECTL) cc 394 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_CORECTL, cc 395 drivers/bcma/driver_chipcommon.c bcma_cc_read32(cc, BCMA_CC_CORECTL) cc 399 drivers/bcma/driver_chipcommon.c bcma_cc_write32(cc, BCMA_CC_CORECTL, cc 400 drivers/bcma/driver_chipcommon.c bcma_cc_read32(cc, BCMA_CC_CORECTL) cc 404 drivers/bcma/driver_chipcommon.c bcma_err(cc->core->bus, "serial not supported on this device ccrev: 0x%x\n", cc 409 drivers/bcma/driver_chipcommon.c irq = bcma_core_irq(cc->core, 0); cc 412 drivers/bcma/driver_chipcommon.c cc->nr_serial_ports = (cc->capabilities & BCMA_CC_CAP_NRUART); cc 413 drivers/bcma/driver_chipcommon.c for (i = 0; i < cc->nr_serial_ports; i++) { cc 414 drivers/bcma/driver_chipcommon.c ports[i].regs = cc->core->io_addr + BCMA_CC_UART0_DATA + cc 19 drivers/bcma/driver_chipcommon_nflash.c int bcma_nflash_init(struct bcma_drv_cc *cc) cc 21 drivers/bcma/driver_chipcommon_nflash.c struct bcma_bus *bus = cc->core->bus; cc 24 drivers/bcma/driver_chipcommon_nflash.c cc->core->id.rev != 38) { cc 29 drivers/bcma/driver_chipcommon_nflash.c if (!(cc->capabilities & BCMA_CC_CAP_NFLASH)) { cc 34 drivers/bcma/driver_chipcommon_nflash.c cc->nflash.present = true; cc 35 drivers/bcma/driver_chipcommon_nflash.c if (cc->core->id.rev == 38 && cc 36 drivers/bcma/driver_chipcommon_nflash.c (cc->status & BCMA_CC_CHIPST_5357_NAND_BOOT)) cc 37 drivers/bcma/driver_chipcommon_nflash.c cc->nflash.boot = true; cc 41 drivers/bcma/driver_chipcommon_nflash.c bcma_nflash_dev.dev.platform_data = &cc->nflash; cc 34 drivers/bcma/driver_chipcommon_pflash.c int bcma_pflash_init(struct 
bcma_drv_cc *cc) cc 36 drivers/bcma/driver_chipcommon_pflash.c struct bcma_pflash *pflash = &cc->pflash; cc 40 drivers/bcma/driver_chipcommon_pflash.c if (!(bcma_read32(cc->core, BCMA_CC_FLASH_CFG) & BCMA_CC_FLASH_CFG_DS)) cc 16 drivers/bcma/driver_chipcommon_pmu.c u32 bcma_chipco_pll_read(struct bcma_drv_cc *cc, u32 offset) cc 18 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_ADDR, offset); cc 19 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_read32(cc, BCMA_CC_PMU_PLLCTL_ADDR); cc 20 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_read32(cc, BCMA_CC_PMU_PLLCTL_DATA); cc 24 drivers/bcma/driver_chipcommon_pmu.c void bcma_chipco_pll_write(struct bcma_drv_cc *cc, u32 offset, u32 value) cc 26 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_ADDR, offset); cc 27 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_read32(cc, BCMA_CC_PMU_PLLCTL_ADDR); cc 28 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_DATA, value); cc 32 drivers/bcma/driver_chipcommon_pmu.c void bcma_chipco_pll_maskset(struct bcma_drv_cc *cc, u32 offset, u32 mask, cc 35 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_ADDR, offset); cc 36 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_read32(cc, BCMA_CC_PMU_PLLCTL_ADDR); cc 37 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_maskset32(cc, BCMA_CC_PMU_PLLCTL_DATA, mask, set); cc 41 drivers/bcma/driver_chipcommon_pmu.c void bcma_chipco_chipctl_maskset(struct bcma_drv_cc *cc, cc 44 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_CHIPCTL_ADDR, offset); cc 45 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_read32(cc, BCMA_CC_PMU_CHIPCTL_ADDR); cc 46 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_maskset32(cc, BCMA_CC_PMU_CHIPCTL_DATA, mask, set); cc 50 drivers/bcma/driver_chipcommon_pmu.c void bcma_chipco_regctl_maskset(struct bcma_drv_cc *cc, u32 offset, u32 mask, cc 53 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_REGCTL_ADDR, offset); cc 54 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_read32(cc, BCMA_CC_PMU_REGCTL_ADDR); cc 55 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_maskset32(cc, BCMA_CC_PMU_REGCTL_DATA, mask, set); cc 59 drivers/bcma/driver_chipcommon_pmu.c static u32 bcma_pmu_xtalfreq(struct bcma_drv_cc *cc) cc 63 drivers/bcma/driver_chipcommon_pmu.c if (!(bcma_pmu_read32(cc, BCMA_CC_PMU_STAT) & cc 67 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_XTAL_FREQ, cc 71 drivers/bcma/driver_chipcommon_pmu.c ilp_ctl = bcma_pmu_read32(cc, BCMA_CC_PMU_XTAL_FREQ); cc 74 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_XTAL_FREQ, 0); cc 80 drivers/bcma/driver_chipcommon_pmu.c static void bcma_pmu2_pll_init0(struct bcma_drv_cc *cc, u32 xtalfreq) cc 82 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 115 drivers/bcma/driver_chipcommon_pmu.c pll0 = bcma_chipco_pll_read(cc, BCMA_CC_PMU15_PLL_PLLCTL0); cc 130 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_mask32(cc, BCMA_CC_PMU_MINRES_MSK, mask); cc 131 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_mask32(cc, BCMA_CC_PMU_MAXRES_MSK, mask); cc 132 drivers/bcma/driver_chipcommon_pmu.c bcma_wait_value(cc->core, BCMA_CLKCTLST, cc 139 drivers/bcma/driver_chipcommon_pmu.c bcma_chipco_pll_write(cc, BCMA_CC_PMU15_PLL_PLLCTL0, pll0); cc 142 drivers/bcma/driver_chipcommon_pmu.c if (cc->pmu.rev >= 2) cc 143 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_set32(cc, BCMA_CC_PMU_CTL, BCMA_CC_PMU_CTL_PLL_UPD); cc 148 
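Note: the bcma_chipco_pll_read()/bcma_chipco_pll_write()/bcma_chipco_chipctl_maskset()/bcma_chipco_regctl_maskset() entries listed above all follow one indirect-access idiom: write the target offset into a *_ADDR register, read that register back once so the selection is posted, then read or write the matching *_DATA register. The stand-alone sketch below reproduces only that shape; fake_pmu, ADDR, DATA and the backing array are invented for illustration and are not bcma API.

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for a pair of indirect-access registers. */
enum { ADDR, DATA };

struct fake_pmu {
	uint32_t regs[2];	/* the ADDR/DATA window                */
	uint32_t backing[64];	/* registers reachable through it      */
};

static void pmu_write32(struct fake_pmu *pmu, int reg, uint32_t val)
{
	pmu->regs[reg] = val;
	if (reg == DATA)	/* a DATA write lands at the selected offset */
		pmu->backing[pmu->regs[ADDR] & 63] = val;
}

static uint32_t pmu_read32(struct fake_pmu *pmu, int reg)
{
	if (reg == DATA)
		return pmu->backing[pmu->regs[ADDR] & 63];
	return pmu->regs[reg];
}

/* Same shape as bcma_chipco_pll_read(): select the offset, read the
 * ADDR register back so the selection is posted, then read DATA. */
static uint32_t pll_read(struct fake_pmu *pmu, uint32_t offset)
{
	pmu_write32(pmu, ADDR, offset);
	(void)pmu_read32(pmu, ADDR);
	return pmu_read32(pmu, DATA);
}

static void pll_write(struct fake_pmu *pmu, uint32_t offset, uint32_t value)
{
	pmu_write32(pmu, ADDR, offset);
	(void)pmu_read32(pmu, ADDR);
	pmu_write32(pmu, DATA, value);
}

int main(void)
{
	struct fake_pmu pmu = { { 0 }, { 0 } };

	pll_write(&pmu, 5, 0xdeadbeef);
	printf("PLL reg 5 = 0x%08x\n", pll_read(&pmu, 5));
	return 0;
}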
drivers/bcma/driver_chipcommon_pmu.c static void bcma_pmu_pll_init(struct bcma_drv_cc *cc) cc 150 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 151 drivers/bcma/driver_chipcommon_pmu.c u32 xtalfreq = bcma_pmu_xtalfreq(cc); cc 157 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu2_pll_init0(cc, xtalfreq); cc 162 drivers/bcma/driver_chipcommon_pmu.c static void bcma_pmu_resources_init(struct bcma_drv_cc *cc) cc 164 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 198 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_MINRES_MSK, min_msk); cc 200 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_MAXRES_MSK, max_msk); cc 210 drivers/bcma/driver_chipcommon_pmu.c void bcma_chipco_bcm4331_ext_pa_lines_ctl(struct bcma_drv_cc *cc, bool enable) cc 212 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 215 drivers/bcma/driver_chipcommon_pmu.c val = bcma_cc_read32(cc, BCMA_CC_CHIPCTL); cc 227 drivers/bcma/driver_chipcommon_pmu.c bcma_cc_write32(cc, BCMA_CC_CHIPCTL, val); cc 230 drivers/bcma/driver_chipcommon_pmu.c static void bcma_pmu_workarounds(struct bcma_drv_cc *cc) cc 232 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 238 drivers/bcma/driver_chipcommon_pmu.c bcma_chipco_chipctl_maskset(cc, 0, cc 245 drivers/bcma/driver_chipcommon_pmu.c bcma_chipco_bcm4331_ext_pa_lines_ctl(cc, true); cc 252 drivers/bcma/driver_chipcommon_pmu.c bcma_cc_maskset32(cc, BCMA_CC_CHIPCTL, cc 255 drivers/bcma/driver_chipcommon_pmu.c bcma_chipco_chipctl_maskset(cc, 0, cc 259 drivers/bcma/driver_chipcommon_pmu.c bcma_chipco_chipctl_maskset(cc, 0, cc 270 drivers/bcma/driver_chipcommon_pmu.c void bcma_pmu_early_init(struct bcma_drv_cc *cc) cc 272 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 275 drivers/bcma/driver_chipcommon_pmu.c if (cc->core->id.rev >= 35 && cc 276 drivers/bcma/driver_chipcommon_pmu.c cc->capabilities_ext & BCMA_CC_CAP_EXT_AOB_PRESENT) { cc 277 drivers/bcma/driver_chipcommon_pmu.c cc->pmu.core = bcma_find_core(bus, BCMA_CORE_PMU); cc 278 drivers/bcma/driver_chipcommon_pmu.c if (!cc->pmu.core) cc 281 drivers/bcma/driver_chipcommon_pmu.c if (!cc->pmu.core) cc 282 drivers/bcma/driver_chipcommon_pmu.c cc->pmu.core = cc->core; cc 284 drivers/bcma/driver_chipcommon_pmu.c pmucap = bcma_pmu_read32(cc, BCMA_CC_PMU_CAP); cc 285 drivers/bcma/driver_chipcommon_pmu.c cc->pmu.rev = (pmucap & BCMA_CC_PMU_CAP_REVISION); cc 287 drivers/bcma/driver_chipcommon_pmu.c bcma_debug(bus, "Found rev %u PMU (capabilities 0x%08X)\n", cc->pmu.rev, cc 291 drivers/bcma/driver_chipcommon_pmu.c void bcma_pmu_init(struct bcma_drv_cc *cc) cc 293 drivers/bcma/driver_chipcommon_pmu.c if (cc->pmu.rev == 1) cc 294 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_mask32(cc, BCMA_CC_PMU_CTL, cc 297 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_set32(cc, BCMA_CC_PMU_CTL, cc 300 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_pll_init(cc); cc 301 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_resources_init(cc); cc 302 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_workarounds(cc); cc 305 drivers/bcma/driver_chipcommon_pmu.c u32 bcma_pmu_get_alp_clock(struct bcma_drv_cc *cc) cc 307 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 335 drivers/bcma/driver_chipcommon_pmu.c if (cc->status & BCMA_CC_CHIPST_4360_XTAL_40MZ) cc 341 drivers/bcma/driver_chipcommon_pmu.c bus->chipinfo.id, cc->pmu.rev, BCMA_CC_PMU_ALP_CLOCK); cc 349 
drivers/bcma/driver_chipcommon_pmu.c static u32 bcma_pmu_pll_clock(struct bcma_drv_cc *cc, u32 pll0, u32 m) cc 352 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 361 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_cc_read32(cc, BCMA_CC_CHIPSTAT); cc 366 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_chipco_pll_read(cc, pll0 + BCMA_CC_PPL_P1P2_OFF); cc 370 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_chipco_pll_read(cc, pll0 + BCMA_CC_PPL_M14_OFF); cc 374 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_chipco_pll_read(cc, pll0 + BCMA_CC_PPL_NM5_OFF); cc 378 drivers/bcma/driver_chipcommon_pmu.c fc = bcma_pmu_get_alp_clock(cc) / 1000000; cc 385 drivers/bcma/driver_chipcommon_pmu.c static u32 bcma_pmu_pll_clock_bcm4706(struct bcma_drv_cc *cc, u32 pll0, u32 m) cc 393 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_chipco_pll_read(cc, pll0 + BCMA_CC_PMU6_4706_PROCPLL_OFF); cc 401 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_cc_read32(cc, BCMA_CC_CHIPSTAT); cc 416 drivers/bcma/driver_chipcommon_pmu.c u32 bcma_pmu_get_bus_clock(struct bcma_drv_cc *cc) cc 418 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 424 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_pll_clock(cc, BCMA_CC_PMU4716_MAINPLL_PLL0, cc 427 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_pll_clock(cc, BCMA_CC_PMU5356_MAINPLL_PLL0, cc 431 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_pll_clock(cc, BCMA_CC_PMU5357_MAINPLL_PLL0, cc 434 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_pll_clock_bcm4706(cc, cc 441 drivers/bcma/driver_chipcommon_pmu.c bus->chipinfo.id, cc->pmu.rev, BCMA_CC_PMU_HT_CLOCK); cc 448 drivers/bcma/driver_chipcommon_pmu.c u32 bcma_pmu_get_cpu_clock(struct bcma_drv_cc *cc) cc 450 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 456 drivers/bcma/driver_chipcommon_pmu.c if (cc->pmu.rev >= 5) { cc 460 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_pll_clock_bcm4706(cc, cc 475 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_pll_clock(cc, pll, BCMA_CC_PMU5_MAINPLL_CPU); cc 479 drivers/bcma/driver_chipcommon_pmu.c return bcma_pmu_get_bus_clock(cc); cc 482 drivers/bcma/driver_chipcommon_pmu.c static void bcma_pmu_spuravoid_pll_write(struct bcma_drv_cc *cc, u32 offset, cc 485 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_ADDR, offset); cc 486 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_DATA, value); cc 489 drivers/bcma/driver_chipcommon_pmu.c void bcma_pmu_spuravoid_pllupdate(struct bcma_drv_cc *cc, int spuravoid) cc 495 drivers/bcma/driver_chipcommon_pmu.c struct bcma_bus *bus = cc->core->bus; cc 510 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_ADDR, cc 512 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_pmu_read32(cc, BCMA_CC_PMU_PLLCTL_DATA); cc 515 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_DATA, tmp); cc 518 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_ADDR, cc 520 drivers/bcma/driver_chipcommon_pmu.c tmp = bcma_pmu_read32(cc, BCMA_CC_PMU_PLLCTL_DATA); cc 523 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_PLLCTL_DATA, tmp); cc 531 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 533 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 536 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, 
BCMA_CC_PMU_PLL_CTL0, cc 538 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 541 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 543 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 553 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 555 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL1, cc 557 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 559 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL3, cc 561 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL4, cc 563 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL5, cc 566 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 568 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL1, cc 570 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 572 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL3, cc 574 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL4, cc 576 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL5, cc 586 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 588 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL1, cc 590 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 592 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL3, cc 594 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL4, cc 596 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL5, cc 599 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 601 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL1, cc 603 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 605 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL3, cc 607 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL4, cc 609 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL5, cc 625 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 627 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL1, cc 629 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 631 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL3, cc 633 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL4, cc 635 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL5, cc 638 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL0, cc 640 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL1, cc 642 drivers/bcma/driver_chipcommon_pmu.c 
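Note: the long run of bcma_pmu_spuravoid_pll_write() calls above, followed by the read-modify-write of BCMA_CC_PMU_CTL at drivers/bcma/driver_chipcommon_pmu.c:659-660, amounts to a two-phase update: load a whole bank of PLL control words, then set an update flag so the PMU latches them together. The sketch below shows just that sequence; the register count, the PMU_CTL_PLL_UPD bit position and the control-word values are arbitrary placeholders, not Broadcom values.

#include <stdint.h>
#include <stdio.h>
#include <stddef.h>

#define N_PLL_CTL	6		/* PLL_CTL0 .. PLL_CTL5               */
#define PMU_CTL_PLL_UPD	(1u << 10)	/* placeholder bit, not the real mask */

static uint32_t pll_ctl[N_PLL_CTL];	/* stands in for the PLL control words */
static uint32_t pmu_ctl;		/* stands in for BCMA_CC_PMU_CTL       */

static void spuravoid_pll_write(size_t reg, uint32_t value)
{
	/* in the driver this goes through the indirect ADDR/DATA window;
	 * here it is just an array store */
	if (reg < N_PLL_CTL)
		pll_ctl[reg] = value;
}

/* Phase 1: load a whole profile of control words.
 * Phase 2: set the update bit so the PMU picks them up in one go. */
static void apply_spuravoid_profile(const uint32_t profile[N_PLL_CTL])
{
	size_t i;

	for (i = 0; i < N_PLL_CTL; i++)
		spuravoid_pll_write(i, profile[i]);

	pmu_ctl |= PMU_CTL_PLL_UPD;
}

int main(void)
{
	/* arbitrary example values, one per control register */
	static const uint32_t mode[N_PLL_CTL] = {
		0x11111111, 0x22222222, 0x33333333,
		0x44444444, 0x55555555, 0x66666666,
	};

	apply_spuravoid_profile(mode);
	printf("PLL_CTL0=0x%08x PMU_CTL=0x%08x\n", pll_ctl[0], pmu_ctl);
	return 0;
}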
bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL2, cc 644 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL3, cc 646 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL4, cc 648 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_spuravoid_pll_write(cc, BCMA_CC_PMU_PLL_CTL5, cc 659 drivers/bcma/driver_chipcommon_pmu.c tmp |= bcma_pmu_read32(cc, BCMA_CC_PMU_CTL); cc 660 drivers/bcma/driver_chipcommon_pmu.c bcma_pmu_write32(cc, BCMA_CC_PMU_CTL, tmp); cc 74 drivers/bcma/driver_chipcommon_sflash.c static void bcma_sflash_cmd(struct bcma_drv_cc *cc, u32 opcode) cc 77 drivers/bcma/driver_chipcommon_sflash.c bcma_cc_write32(cc, BCMA_CC_FLASHCTL, cc 80 drivers/bcma/driver_chipcommon_sflash.c if (!(bcma_cc_read32(cc, BCMA_CC_FLASHCTL) & cc 85 drivers/bcma/driver_chipcommon_sflash.c bcma_err(cc->core->bus, "SFLASH control command failed (timeout)!\n"); cc 89 drivers/bcma/driver_chipcommon_sflash.c int bcma_sflash_init(struct bcma_drv_cc *cc) cc 91 drivers/bcma/driver_chipcommon_sflash.c struct bcma_bus *bus = cc->core->bus; cc 92 drivers/bcma/driver_chipcommon_sflash.c struct bcma_sflash *sflash = &cc->sflash; cc 96 drivers/bcma/driver_chipcommon_sflash.c switch (cc->capabilities & BCMA_CC_CAP_FLASHT) { cc 98 drivers/bcma/driver_chipcommon_sflash.c bcma_sflash_cmd(cc, BCMA_CC_FLASHCTL_ST_DP); cc 100 drivers/bcma/driver_chipcommon_sflash.c bcma_cc_write32(cc, BCMA_CC_FLASHADDR, 0); cc 101 drivers/bcma/driver_chipcommon_sflash.c bcma_sflash_cmd(cc, BCMA_CC_FLASHCTL_ST_RES); cc 102 drivers/bcma/driver_chipcommon_sflash.c id = bcma_cc_read32(cc, BCMA_CC_FLASHDATA); cc 104 drivers/bcma/driver_chipcommon_sflash.c bcma_cc_write32(cc, BCMA_CC_FLASHADDR, 1); cc 105 drivers/bcma/driver_chipcommon_sflash.c bcma_sflash_cmd(cc, BCMA_CC_FLASHCTL_ST_RES); cc 106 drivers/bcma/driver_chipcommon_sflash.c id2 = bcma_cc_read32(cc, BCMA_CC_FLASHDATA); cc 131 drivers/bcma/driver_chipcommon_sflash.c bcma_sflash_cmd(cc, BCMA_CC_FLASHCTL_AT_STATUS); cc 132 drivers/bcma/driver_chipcommon_sflash.c id = bcma_cc_read32(cc, BCMA_CC_FLASHDATA) & 0x3c; cc 22 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(chip); cc 24 drivers/bcma/driver_gpio.c return !!bcma_chipco_gpio_in(cc, 1 << gpio); cc 30 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(chip); cc 32 drivers/bcma/driver_gpio.c bcma_chipco_gpio_out(cc, 1 << gpio, value ? 1 << gpio : 0); cc 37 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(chip); cc 39 drivers/bcma/driver_gpio.c bcma_chipco_gpio_outen(cc, 1 << gpio, 0); cc 46 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(chip); cc 48 drivers/bcma/driver_gpio.c bcma_chipco_gpio_outen(cc, 1 << gpio, 1 << gpio); cc 49 drivers/bcma/driver_gpio.c bcma_chipco_gpio_out(cc, 1 << gpio, value ? 
1 << gpio : 0); cc 55 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(chip); cc 57 drivers/bcma/driver_gpio.c bcma_chipco_gpio_control(cc, 1 << gpio, 0); cc 59 drivers/bcma/driver_gpio.c bcma_chipco_gpio_pulldown(cc, 1 << gpio, 0); cc 61 drivers/bcma/driver_gpio.c bcma_chipco_gpio_pullup(cc, 1 << gpio, 1 << gpio); cc 68 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(chip); cc 71 drivers/bcma/driver_gpio.c bcma_chipco_gpio_pullup(cc, 1 << gpio, 0); cc 79 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(gc); cc 81 drivers/bcma/driver_gpio.c u32 val = bcma_chipco_gpio_in(cc, BIT(gpio)); cc 83 drivers/bcma/driver_gpio.c bcma_chipco_gpio_polarity(cc, BIT(gpio), val); cc 84 drivers/bcma/driver_gpio.c bcma_chipco_gpio_intmask(cc, BIT(gpio), BIT(gpio)); cc 90 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = gpiochip_get_data(gc); cc 93 drivers/bcma/driver_gpio.c bcma_chipco_gpio_intmask(cc, BIT(gpio), 0); cc 104 drivers/bcma/driver_gpio.c struct bcma_drv_cc *cc = dev_id; cc 105 drivers/bcma/driver_gpio.c struct gpio_chip *gc = &cc->gpio; cc 106 drivers/bcma/driver_gpio.c u32 val = bcma_cc_read32(cc, BCMA_CC_GPIOIN); cc 107 drivers/bcma/driver_gpio.c u32 mask = bcma_cc_read32(cc, BCMA_CC_GPIOIRQ); cc 108 drivers/bcma/driver_gpio.c u32 pol = bcma_cc_read32(cc, BCMA_CC_GPIOPOL); cc 117 drivers/bcma/driver_gpio.c bcma_chipco_gpio_polarity(cc, irqs, val & irqs); cc 122 drivers/bcma/driver_gpio.c static int bcma_gpio_irq_init(struct bcma_drv_cc *cc) cc 124 drivers/bcma/driver_gpio.c struct gpio_chip *chip = &cc->gpio; cc 127 drivers/bcma/driver_gpio.c if (cc->core->bus->hosttype != BCMA_HOSTTYPE_SOC) cc 130 drivers/bcma/driver_gpio.c hwirq = bcma_core_irq(cc->core, 0); cc 132 drivers/bcma/driver_gpio.c cc); cc 136 drivers/bcma/driver_gpio.c bcma_chipco_gpio_intmask(cc, ~0, 0); cc 137 drivers/bcma/driver_gpio.c bcma_cc_set32(cc, BCMA_CC_IRQMASK, BCMA_CC_IRQ_GPIO); cc 145 drivers/bcma/driver_gpio.c free_irq(hwirq, cc); cc 152 drivers/bcma/driver_gpio.c static void bcma_gpio_irq_exit(struct bcma_drv_cc *cc) cc 154 drivers/bcma/driver_gpio.c if (cc->core->bus->hosttype != BCMA_HOSTTYPE_SOC) cc 157 drivers/bcma/driver_gpio.c bcma_cc_mask32(cc, BCMA_CC_IRQMASK, ~BCMA_CC_IRQ_GPIO); cc 158 drivers/bcma/driver_gpio.c free_irq(bcma_core_irq(cc->core, 0), cc); cc 161 drivers/bcma/driver_gpio.c static int bcma_gpio_irq_init(struct bcma_drv_cc *cc) cc 166 drivers/bcma/driver_gpio.c static void bcma_gpio_irq_exit(struct bcma_drv_cc *cc) cc 171 drivers/bcma/driver_gpio.c int bcma_gpio_init(struct bcma_drv_cc *cc) cc 173 drivers/bcma/driver_gpio.c struct bcma_bus *bus = cc->core->bus; cc 174 drivers/bcma/driver_gpio.c struct gpio_chip *chip = &cc->gpio; cc 188 drivers/bcma/driver_gpio.c chip->of_node = cc->core->dev.of_node; cc 210 drivers/bcma/driver_gpio.c cc->core->bus->hosttype == BCMA_HOSTTYPE_SOC) cc 215 drivers/bcma/driver_gpio.c err = gpiochip_add_data(chip, cc); cc 219 drivers/bcma/driver_gpio.c err = bcma_gpio_irq_init(cc); cc 228 drivers/bcma/driver_gpio.c int bcma_gpio_unregister(struct bcma_drv_cc *cc) cc 230 drivers/bcma/driver_gpio.c bcma_gpio_irq_exit(cc); cc 231 drivers/bcma/driver_gpio.c gpiochip_remove(&cc->gpio); cc 204 drivers/bcma/driver_mips.c struct bcma_drv_cc *cc = &bus->drv_cc; cc 205 drivers/bcma/driver_mips.c u8 cc_rev = cc->core->id.rev; cc 225 drivers/bcma/driver_mips.c if (cc->status & BCMA_CC_CHIPST_5357_NAND_BOOT) cc 227 drivers/bcma/driver_mips.c else if (cc->status & BIT(5)) cc 231 drivers/bcma/driver_mips.c if 
((cc->capabilities & BCMA_CC_CAP_FLASHT) == cc 565 drivers/bcma/sprom.c struct bcma_device *cc = bus->drv_cc.core; cc 569 drivers/bcma/sprom.c if ((bcma_read32(cc, BCMA_CC_OTPS) & BCMA_CC_OTPS_GU_PROG_HW) == 0) cc 573 drivers/bcma/sprom.c offset = (bcma_read32(cc, BCMA_CC_OTPL) & BCMA_CC_OTPL_GURGN_OFFSET); cc 253 drivers/block/paride/epat.c { int k, j, f, cc; cc 257 drivers/block/paride/epat.c cc = RR(0xd); cc 284 drivers/block/paride/epat.c pi->device,pi->port,pi->mode,cc,e[0],e[1],f); cc 162 drivers/char/tpm/tpm-interface.c u32 cc = be32_to_cpu(header->return_code); cc 182 drivers/char/tpm/tpm-interface.c if (rc == TPM2_RC_TESTING && cc == TPM2_CC_SELF_TEST) cc 458 drivers/char/tpm/tpm.h int tpm2_find_cc(struct tpm_chip *chip, u32 cc); cc 929 drivers/char/tpm/tpm2-cmd.c u32 cc; cc 974 drivers/char/tpm/tpm2-cmd.c cc = chip->cc_attrs_tbl[i] & 0xFFFF; cc 976 drivers/char/tpm/tpm2-cmd.c if (cc == TPM2_CC_CONTEXT_SAVE || cc == TPM2_CC_FLUSH_CONTEXT) { cc 1057 drivers/char/tpm/tpm2-cmd.c int tpm2_find_cc(struct tpm_chip *chip, u32 cc) cc 1062 drivers/char/tpm/tpm2-cmd.c if (cc == (chip->cc_attrs_tbl[i] & GENMASK(15, 0))) cc 236 drivers/char/tpm/tpm2-space.c static int tpm2_map_command(struct tpm_chip *chip, u32 cc, u8 *cmd) cc 244 drivers/char/tpm/tpm2-space.c i = tpm2_find_cc(chip, cc); cc 268 drivers/char/tpm/tpm2-space.c u32 cc; cc 275 drivers/char/tpm/tpm2-space.c cc = be32_to_cpu(header->ordinal); cc 277 drivers/char/tpm/tpm2-space.c i = tpm2_find_cc(chip, cc); cc 280 drivers/char/tpm/tpm2-space.c cc); cc 290 drivers/char/tpm/tpm2-space.c return cc; cc 301 drivers/char/tpm/tpm2-space.c int cc; cc 306 drivers/char/tpm/tpm2-space.c cc = tpm_find_and_validate_cc(chip, space, cmd, cmdsiz); cc 307 drivers/char/tpm/tpm2-space.c if (cc < 0) cc 308 drivers/char/tpm/tpm2-space.c return cc; cc 323 drivers/char/tpm/tpm2-space.c rc = tpm2_map_command(chip, cc, cmd); cc 329 drivers/char/tpm/tpm2-space.c chip->last_cc = cc; cc 369 drivers/char/tpm/tpm2-space.c static int tpm2_map_response_header(struct tpm_chip *chip, u32 cc, u8 *rsp, cc 383 drivers/char/tpm/tpm2-space.c i = tpm2_find_cc(chip, cc); cc 429 drivers/char/tpm/tpm2-space.c static int tpm2_map_response_body(struct tpm_chip *chip, u32 cc, u8 *rsp, cc 441 drivers/char/tpm/tpm2-space.c if (cc != TPM2_CC_GET_CAPABILITY || cc 186 drivers/clk/qcom/clk-spmi-pmic-div.c struct spmi_pmic_div_clk_cc *cc = data; cc 189 drivers/clk/qcom/clk-spmi-pmic-div.c if (idx < 0 || idx >= cc->nclks) { cc 191 drivers/clk/qcom/clk-spmi-pmic-div.c __func__, clkspec->args[0], cc->nclks); cc 195 drivers/clk/qcom/clk-spmi-pmic-div.c return &cc->clks[idx].hw; cc 200 drivers/clk/qcom/clk-spmi-pmic-div.c struct spmi_pmic_div_clk_cc *cc; cc 234 drivers/clk/qcom/clk-spmi-pmic-div.c cc = devm_kzalloc(dev, struct_size(cc, clks, nclks), GFP_KERNEL); cc 235 drivers/clk/qcom/clk-spmi-pmic-div.c if (!cc) cc 237 drivers/clk/qcom/clk-spmi-pmic-div.c cc->nclks = nclks; cc 260 drivers/clk/qcom/clk-spmi-pmic-div.c for (i = 0, clkdiv = cc->clks; i < nclks; i++) { cc 274 drivers/clk/qcom/clk-spmi-pmic-div.c return devm_of_clk_add_hw_provider(dev, spmi_pmic_div_clk_hw_get, cc); cc 198 drivers/clk/qcom/common.c static void qcom_cc_drop_protected(struct device *dev, struct qcom_cc *cc) cc 206 drivers/clk/qcom/common.c if (i >= cc->num_rclks) cc 209 drivers/clk/qcom/common.c cc->rclks[i] = NULL; cc 216 drivers/clk/qcom/common.c struct qcom_cc *cc = data; cc 219 drivers/clk/qcom/common.c if (idx >= cc->num_rclks) { cc 224 drivers/clk/qcom/common.c return cc->rclks[idx] ? 
&cc->rclks[idx]->hw : ERR_PTR(-ENOENT); cc 233 drivers/clk/qcom/common.c struct qcom_cc *cc; cc 240 drivers/clk/qcom/common.c cc = devm_kzalloc(dev, sizeof(*cc), GFP_KERNEL); cc 241 drivers/clk/qcom/common.c if (!cc) cc 244 drivers/clk/qcom/common.c reset = &cc->reset; cc 272 drivers/clk/qcom/common.c cc->rclks = rclks; cc 273 drivers/clk/qcom/common.c cc->num_rclks = num_clks; cc 275 drivers/clk/qcom/common.c qcom_cc_drop_protected(dev, cc); cc 292 drivers/clk/qcom/common.c ret = devm_of_clk_add_hw_provider(dev, qcom_cc_clk_hw_get, cc); cc 15 drivers/clk/sprd/composite.c struct sprd_comp *cc = hw_to_sprd_comp(hw); cc 17 drivers/clk/sprd/composite.c return sprd_div_helper_round_rate(&cc->common, &cc->div, cc 24 drivers/clk/sprd/composite.c struct sprd_comp *cc = hw_to_sprd_comp(hw); cc 26 drivers/clk/sprd/composite.c return sprd_div_helper_recalc_rate(&cc->common, &cc->div, parent_rate); cc 32 drivers/clk/sprd/composite.c struct sprd_comp *cc = hw_to_sprd_comp(hw); cc 34 drivers/clk/sprd/composite.c return sprd_div_helper_set_rate(&cc->common, &cc->div, cc 40 drivers/clk/sprd/composite.c struct sprd_comp *cc = hw_to_sprd_comp(hw); cc 42 drivers/clk/sprd/composite.c return sprd_mux_helper_get_parent(&cc->common, &cc->mux); cc 47 drivers/clk/sprd/composite.c struct sprd_comp *cc = hw_to_sprd_comp(hw); cc 49 drivers/clk/sprd/composite.c return sprd_mux_helper_set_parent(&cc->common, &cc->mux, index); cc 183 drivers/clocksource/arm_arch_timer.c static u64 arch_counter_read_cc(const struct cyclecounter *cc) cc 161 drivers/cpufreq/bmips-cpufreq.c struct cpufreq_compat *cc; cc 164 drivers/cpufreq/bmips-cpufreq.c for (cc = bmips_cpufreq_compat; cc->compatible; cc++) { cc 165 drivers/cpufreq/bmips-cpufreq.c np = of_find_compatible_node(NULL, "cpu", cc->compatible); cc 168 drivers/cpufreq/bmips-cpufreq.c priv = cc; cc 174 drivers/cpufreq/bmips-cpufreq.c if (!cc->compatible) cc 74 drivers/crypto/cavium/zip/zip_deflate.c zip_cmd->s.cc = zip_ops->ccode; cc 79 drivers/crypto/cavium/zip/zip_inflate.c zip_cmd->s.cc = zip_ops->ccode; cc 187 drivers/crypto/cavium/zip/zip_regs.h u64 cc : 2; cc 203 drivers/crypto/cavium/zip/zip_regs.h u64 cc : 2; cc 170 drivers/crypto/nx/nx-842-powernv.c (csb)->cs, (csb)->cc, (csb)->ce, \ cc 218 drivers/crypto/nx/nx-842-powernv.c switch (csb->cc) { cc 358 drivers/crypto/nx/nx-842-powernv.c CSB_ERR(csb, "Invalid CC %d", csb->cc); cc 747 drivers/dma/ti/cppi41.c struct cppi41_channel *cc, *_ct; cc 754 drivers/dma/ti/cppi41.c list_for_each_entry_safe(cc, _ct, &cdd->pending, node) { cc 755 drivers/dma/ti/cppi41.c if (cc != c) cc 757 drivers/dma/ti/cppi41.c list_del(&cc->node); cc 249 drivers/infiniband/hw/cxgb4/device.c int cc; cc 267 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(qpd->buf + qpd->pos, space, cc 288 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(qpd->buf + qpd->pos, space, cc 304 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(qpd->buf + qpd->pos, space, cc 309 drivers/infiniband/hw/cxgb4/device.c if (cc < space) cc 310 drivers/infiniband/hw/cxgb4/device.c qpd->pos += cc; cc 375 drivers/infiniband/hw/cxgb4/device.c int cc; cc 390 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(stagd->buf + stagd->pos, space, cc 402 drivers/infiniband/hw/cxgb4/device.c if (cc < space) cc 403 drivers/infiniband/hw/cxgb4/device.c stagd->pos += cc; cc 561 drivers/infiniband/hw/cxgb4/device.c int cc; cc 574 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(epd->buf + epd->pos, space, cc 595 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(epd->buf + epd->pos, space, cc 
610 drivers/infiniband/hw/cxgb4/device.c if (cc < space) cc 611 drivers/infiniband/hw/cxgb4/device.c epd->pos += cc; cc 619 drivers/infiniband/hw/cxgb4/device.c int cc; cc 631 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(epd->buf + epd->pos, space, cc 644 drivers/infiniband/hw/cxgb4/device.c cc = snprintf(epd->buf + epd->pos, space, cc 652 drivers/infiniband/hw/cxgb4/device.c if (cc < space) cc 653 drivers/infiniband/hw/cxgb4/device.c epd->pos += cc; cc 151 drivers/infiniband/hw/efa/efa_com.c sq->cc = 0; cc 189 drivers/infiniband/hw/efa/efa_com.c cq->cc = 0; cc 231 drivers/infiniband/hw/efa/efa_com.c aenq->cc = 0; cc 253 drivers/infiniband/hw/efa/efa_com.c writel(edev->aenq.cc, edev->reg_bar + EFA_REGS_AENQ_CONS_DB_OFF); cc 460 drivers/infiniband/hw/efa/efa_com.c ci = aq->cq.cc & queue_size_mask; cc 485 drivers/infiniband/hw/efa/efa_com.c aq->cq.cc += comp_num; cc 487 drivers/infiniband/hw/efa/efa_com.c aq->sq.cc += comp_num; cc 576 drivers/infiniband/hw/efa/efa_com.c comp_ctx, aq->sq.pc, aq->sq.cc, aq->cq.cc); cc 583 drivers/infiniband/hw/efa/efa_com.c comp_ctx, aq->sq.pc, aq->sq.cc, aq->cq.cc); cc 857 drivers/infiniband/hw/efa/efa_com.c ci = aenq->cc & (aenq->depth - 1); cc 888 drivers/infiniband/hw/efa/efa_com.c aenq->cc += processed; cc 896 drivers/infiniband/hw/efa/efa_com.c writel(aenq->cc, edev->reg_bar + EFA_REGS_AENQ_CONS_DB_OFF); cc 29 drivers/infiniband/hw/efa/efa_com.h u16 cc; /* consumer counter */ cc 40 drivers/infiniband/hw/efa/efa_com.h u16 cc; /* consumer counter */ cc 87 drivers/infiniband/hw/efa/efa_com.h u32 cc; /* consumer counter */ cc 1366 drivers/infiniband/hw/mlx5/odp.c int cc = 0; cc 1368 drivers/infiniband/hw/mlx5/odp.c while ((eqe = mlx5_eq_get_eqe(eq->core, cc))) { cc 1442 drivers/infiniband/hw/mlx5/odp.c cc = mlx5_eq_update_cc(eq->core, ++cc); cc 1445 drivers/infiniband/hw/mlx5/odp.c mlx5_eq_update_ci(eq->core, cc, 1); cc 10 drivers/input/mouse/cypress_ps2.h #define ENCODE_CMD(aa, bb, cc, dd) \ cc 11 drivers/input/mouse/cypress_ps2.h (COMPOSIT((aa), 6) | COMPOSIT((bb), 4) | COMPOSIT((cc), 2) | COMPOSIT((dd), 0)) cc 65 drivers/md/dm-crypt.c struct crypt_config *cc; cc 90 drivers/md/dm-crypt.c int (*ctr)(struct crypt_config *cc, struct dm_target *ti, cc 92 drivers/md/dm-crypt.c void (*dtr)(struct crypt_config *cc); cc 93 drivers/md/dm-crypt.c int (*init)(struct crypt_config *cc); cc 94 drivers/md/dm-crypt.c int (*wipe)(struct crypt_config *cc); cc 95 drivers/md/dm-crypt.c int (*generator)(struct crypt_config *cc, u8 *iv, cc 97 drivers/md/dm-crypt.c int (*post)(struct crypt_config *cc, u8 *iv, cc 223 drivers/md/dm-crypt.c static struct scatterlist *crypt_get_sg_data(struct crypt_config *cc, cc 229 drivers/md/dm-crypt.c static struct crypto_skcipher *any_tfm(struct crypt_config *cc) cc 231 drivers/md/dm-crypt.c return cc->cipher_tfm.tfms[0]; cc 234 drivers/md/dm-crypt.c static struct crypto_aead *any_tfm_aead(struct crypt_config *cc) cc 236 drivers/md/dm-crypt.c return cc->cipher_tfm.tfms_aead[0]; cc 290 drivers/md/dm-crypt.c static int crypt_iv_plain_gen(struct crypt_config *cc, u8 *iv, cc 293 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size); cc 299 drivers/md/dm-crypt.c static int crypt_iv_plain64_gen(struct crypt_config *cc, u8 *iv, cc 302 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size); cc 308 drivers/md/dm-crypt.c static int crypt_iv_plain64be_gen(struct crypt_config *cc, u8 *iv, cc 311 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size); cc 313 drivers/md/dm-crypt.c *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector); cc 318 
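Note: in the drivers/infiniband/hw/efa/efa_com.c entries above, cc is a free-running consumer counter for a power-of-two queue: the ring index is derived with cc & (depth - 1), entries are consumed, cc is advanced by the number processed, and the new value is written to a consumer doorbell register. A minimal user-space model of that bookkeeping follows; the ring struct, RING_DEPTH and the simulated doorbell are illustrative only.

#include <stdint.h>
#include <stdio.h>

#define RING_DEPTH 8			/* must be a power of two */

struct ring {
	uint32_t entries[RING_DEPTH];
	uint16_t cc;			/* free-running consumer counter        */
	uint16_t pc;			/* free-running producer counter        */
	uint16_t doorbell;		/* last cc value reported to "hardware" */
};

static void ring_produce(struct ring *r, uint32_t val)
{
	r->entries[r->pc & (RING_DEPTH - 1)] = val;
	r->pc++;
}

/* Consume everything available, then report the new consumer counter once,
 * mirroring the shape of the aenq handling: index = cc & (depth - 1),
 * cc += processed, write cc to the consumer doorbell. */
static void ring_consume_all(struct ring *r)
{
	uint16_t processed = 0;

	while ((uint16_t)(r->cc + processed) != r->pc) {
		uint16_t ci = (uint16_t)(r->cc + processed) & (RING_DEPTH - 1);

		printf("slot %u -> %u\n", ci, r->entries[ci]);
		processed++;
	}

	r->cc += processed;
	r->doorbell = r->cc;		/* stands in for the doorbell write */
}

int main(void)
{
	struct ring r = { .cc = 0, .pc = 0, .doorbell = 0 };
	uint32_t i;

	for (i = 0; i < 5; i++)
		ring_produce(&r, i * i);
	ring_consume_all(&r);
	printf("cc=%u doorbell=%u\n", r.cc, r.doorbell);
	return 0;
}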
drivers/md/dm-crypt.c static int crypt_iv_essiv_gen(struct crypt_config *cc, u8 *iv, cc 325 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size); cc 331 drivers/md/dm-crypt.c static int crypt_iv_benbi_ctr(struct crypt_config *cc, struct dm_target *ti, cc 337 drivers/md/dm-crypt.c if (test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags)) cc 338 drivers/md/dm-crypt.c bs = crypto_aead_blocksize(any_tfm_aead(cc)); cc 340 drivers/md/dm-crypt.c bs = crypto_skcipher_blocksize(any_tfm(cc)); cc 356 drivers/md/dm-crypt.c cc->iv_gen_private.benbi.shift = 9 - log; cc 361 drivers/md/dm-crypt.c static void crypt_iv_benbi_dtr(struct crypt_config *cc) cc 365 drivers/md/dm-crypt.c static int crypt_iv_benbi_gen(struct crypt_config *cc, u8 *iv, cc 370 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */ cc 372 drivers/md/dm-crypt.c val = cpu_to_be64(((u64)dmreq->iv_sector << cc->iv_gen_private.benbi.shift) + 1); cc 373 drivers/md/dm-crypt.c put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64))); cc 378 drivers/md/dm-crypt.c static int crypt_iv_null_gen(struct crypt_config *cc, u8 *iv, cc 381 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size); cc 386 drivers/md/dm-crypt.c static void crypt_iv_lmk_dtr(struct crypt_config *cc) cc 388 drivers/md/dm-crypt.c struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; cc 398 drivers/md/dm-crypt.c static int crypt_iv_lmk_ctr(struct crypt_config *cc, struct dm_target *ti, cc 401 drivers/md/dm-crypt.c struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; cc 403 drivers/md/dm-crypt.c if (cc->sector_size != (1 << SECTOR_SHIFT)) { cc 415 drivers/md/dm-crypt.c if (cc->key_parts == cc->tfms_count) { cc 422 drivers/md/dm-crypt.c crypt_iv_lmk_dtr(cc); cc 430 drivers/md/dm-crypt.c static int crypt_iv_lmk_init(struct crypt_config *cc) cc 432 drivers/md/dm-crypt.c struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; cc 433 drivers/md/dm-crypt.c int subkey_size = cc->key_size / cc->key_parts; cc 437 drivers/md/dm-crypt.c memcpy(lmk->seed, cc->key + (cc->tfms_count * subkey_size), cc 443 drivers/md/dm-crypt.c static int crypt_iv_lmk_wipe(struct crypt_config *cc) cc 445 drivers/md/dm-crypt.c struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; cc 453 drivers/md/dm-crypt.c static int crypt_iv_lmk_one(struct crypt_config *cc, u8 *iv, cc 457 drivers/md/dm-crypt.c struct iv_lmk_private *lmk = &cc->iv_gen_private.lmk; cc 496 drivers/md/dm-crypt.c memcpy(iv, &md5state.hash, cc->iv_size); cc 501 drivers/md/dm-crypt.c static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv, cc 509 drivers/md/dm-crypt.c sg = crypt_get_sg_data(cc, dmreq->sg_in); cc 511 drivers/md/dm-crypt.c r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset); cc 514 drivers/md/dm-crypt.c memset(iv, 0, cc->iv_size); cc 519 drivers/md/dm-crypt.c static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv, cc 529 drivers/md/dm-crypt.c sg = crypt_get_sg_data(cc, dmreq->sg_out); cc 531 drivers/md/dm-crypt.c r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset); cc 535 drivers/md/dm-crypt.c crypto_xor(dst + sg->offset, iv, cc->iv_size); cc 541 drivers/md/dm-crypt.c static void crypt_iv_tcw_dtr(struct crypt_config *cc) cc 543 drivers/md/dm-crypt.c struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; cc 555 drivers/md/dm-crypt.c static int crypt_iv_tcw_ctr(struct crypt_config *cc, struct dm_target *ti, cc 558 drivers/md/dm-crypt.c struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; cc 560 drivers/md/dm-crypt.c if (cc->sector_size != (1 << SECTOR_SHIFT)) { cc 565 drivers/md/dm-crypt.c 
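Note: crypt_iv_benbi_gen() in the entries above builds its IV by zeroing the buffer and storing a big-endian 64-bit count, (sector << shift) + 1, in the last eight bytes, where shift is 9 minus log2 of the cipher block size. The sketch below lays out the same bytes by hand; the 16-byte IV size is an assumption for the example and the open-coded byte swap stands in for cpu_to_be64()/put_unaligned().

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define IV_SIZE 16	/* assumed cipher IV size for the example */

/* Store a 64-bit value big-endian into the last 8 bytes of the IV,
 * leaving the leading bytes zero - the benbi layout. */
static void benbi_iv(uint8_t iv[IV_SIZE], uint64_t sector, unsigned int shift)
{
	uint64_t val = (sector << shift) + 1;	/* 1-based block count */
	int i;

	memset(iv, 0, IV_SIZE - sizeof(val));
	for (i = 0; i < 8; i++)
		iv[IV_SIZE - 1 - i] = (uint8_t)(val >> (8 * i));
}

int main(void)
{
	uint8_t iv[IV_SIZE];
	int i;

	/* shift = 9 - log2(block size); a 16-byte block gives shift 5 */
	benbi_iv(iv, 42, 5);

	for (i = 0; i < IV_SIZE; i++)
		printf("%02x", iv[i]);
	printf("\n");
	return 0;
}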
if (cc->key_size <= (cc->iv_size + TCW_WHITENING_SIZE)) { cc 576 drivers/md/dm-crypt.c tcw->iv_seed = kzalloc(cc->iv_size, GFP_KERNEL); cc 579 drivers/md/dm-crypt.c crypt_iv_tcw_dtr(cc); cc 587 drivers/md/dm-crypt.c static int crypt_iv_tcw_init(struct crypt_config *cc) cc 589 drivers/md/dm-crypt.c struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; cc 590 drivers/md/dm-crypt.c int key_offset = cc->key_size - cc->iv_size - TCW_WHITENING_SIZE; cc 592 drivers/md/dm-crypt.c memcpy(tcw->iv_seed, &cc->key[key_offset], cc->iv_size); cc 593 drivers/md/dm-crypt.c memcpy(tcw->whitening, &cc->key[key_offset + cc->iv_size], cc 599 drivers/md/dm-crypt.c static int crypt_iv_tcw_wipe(struct crypt_config *cc) cc 601 drivers/md/dm-crypt.c struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; cc 603 drivers/md/dm-crypt.c memset(tcw->iv_seed, 0, cc->iv_size); cc 609 drivers/md/dm-crypt.c static int crypt_iv_tcw_whitening(struct crypt_config *cc, cc 613 drivers/md/dm-crypt.c struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; cc 647 drivers/md/dm-crypt.c static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv, cc 651 drivers/md/dm-crypt.c struct iv_tcw_private *tcw = &cc->iv_gen_private.tcw; cc 658 drivers/md/dm-crypt.c sg = crypt_get_sg_data(cc, dmreq->sg_in); cc 660 drivers/md/dm-crypt.c r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset); cc 666 drivers/md/dm-crypt.c if (cc->iv_size > 8) cc 668 drivers/md/dm-crypt.c cc->iv_size - 8); cc 673 drivers/md/dm-crypt.c static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv, cc 684 drivers/md/dm-crypt.c sg = crypt_get_sg_data(cc, dmreq->sg_out); cc 686 drivers/md/dm-crypt.c r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset); cc 692 drivers/md/dm-crypt.c static int crypt_iv_random_gen(struct crypt_config *cc, u8 *iv, cc 696 drivers/md/dm-crypt.c get_random_bytes(iv, cc->iv_size); cc 700 drivers/md/dm-crypt.c static int crypt_iv_eboiv_ctr(struct crypt_config *cc, struct dm_target *ti, cc 703 drivers/md/dm-crypt.c if (test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags)) { cc 708 drivers/md/dm-crypt.c if (crypto_skcipher_blocksize(any_tfm(cc)) != cc->iv_size) { cc 717 drivers/md/dm-crypt.c static int crypt_iv_eboiv_gen(struct crypt_config *cc, u8 *iv, cc 726 drivers/md/dm-crypt.c req = skcipher_request_alloc(any_tfm(cc), GFP_NOIO); cc 730 drivers/md/dm-crypt.c memset(buf, 0, cc->iv_size); cc 731 drivers/md/dm-crypt.c *(__le64 *)buf = cpu_to_le64(dmreq->iv_sector * cc->sector_size); cc 733 drivers/md/dm-crypt.c sg_init_one(&src, page_address(ZERO_PAGE(0)), cc->iv_size); cc 734 drivers/md/dm-crypt.c sg_init_one(&dst, iv, cc->iv_size); cc 735 drivers/md/dm-crypt.c skcipher_request_set_crypt(req, &src, &dst, cc->iv_size, buf); cc 799 drivers/md/dm-crypt.c static bool crypt_integrity_aead(struct crypt_config *cc) cc 801 drivers/md/dm-crypt.c return test_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags); cc 804 drivers/md/dm-crypt.c static bool crypt_integrity_hmac(struct crypt_config *cc) cc 806 drivers/md/dm-crypt.c return crypt_integrity_aead(cc) && cc->key_mac_size; cc 810 drivers/md/dm-crypt.c static struct scatterlist *crypt_get_sg_data(struct crypt_config *cc, cc 813 drivers/md/dm-crypt.c if (unlikely(crypt_integrity_aead(cc))) cc 825 drivers/md/dm-crypt.c if (!bio_sectors(bio) || !io->cc->on_disk_tag_size) cc 832 drivers/md/dm-crypt.c tag_len = io->cc->on_disk_tag_size * (bio_sectors(bio) >> io->cc->sector_shift); cc 835 drivers/md/dm-crypt.c bip->bip_iter.bi_sector = io->cc->start + io->sector; cc 845 drivers/md/dm-crypt.c static int 
crypt_integrity_ctr(struct crypt_config *cc, struct dm_target *ti) cc 848 drivers/md/dm-crypt.c struct blk_integrity *bi = blk_get_integrity(cc->dev->bdev->bd_disk); cc 857 drivers/md/dm-crypt.c if (bi->tag_size != cc->on_disk_tag_size || cc 858 drivers/md/dm-crypt.c bi->tuple_size != cc->on_disk_tag_size) { cc 862 drivers/md/dm-crypt.c if (1 << bi->interval_exp != cc->sector_size) { cc 867 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) { cc 868 drivers/md/dm-crypt.c cc->integrity_tag_size = cc->on_disk_tag_size - cc->integrity_iv_size; cc 870 drivers/md/dm-crypt.c cc->integrity_tag_size, cc->integrity_iv_size); cc 872 drivers/md/dm-crypt.c if (crypto_aead_setauthsize(any_tfm_aead(cc), cc->integrity_tag_size)) { cc 876 drivers/md/dm-crypt.c } else if (cc->integrity_iv_size) cc 878 drivers/md/dm-crypt.c cc->integrity_iv_size); cc 880 drivers/md/dm-crypt.c if ((cc->integrity_tag_size + cc->integrity_iv_size) != bi->tag_size) { cc 892 drivers/md/dm-crypt.c static void crypt_convert_init(struct crypt_config *cc, cc 903 drivers/md/dm-crypt.c ctx->cc_sector = sector + cc->iv_offset; cc 907 drivers/md/dm-crypt.c static struct dm_crypt_request *dmreq_of_req(struct crypt_config *cc, cc 910 drivers/md/dm-crypt.c return (struct dm_crypt_request *)((char *)req + cc->dmreq_start); cc 913 drivers/md/dm-crypt.c static void *req_of_dmreq(struct crypt_config *cc, struct dm_crypt_request *dmreq) cc 915 drivers/md/dm-crypt.c return (void *)((char *)dmreq - cc->dmreq_start); cc 918 drivers/md/dm-crypt.c static u8 *iv_of_dmreq(struct crypt_config *cc, cc 921 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 923 drivers/md/dm-crypt.c crypto_aead_alignmask(any_tfm_aead(cc)) + 1); cc 926 drivers/md/dm-crypt.c crypto_skcipher_alignmask(any_tfm(cc)) + 1); cc 929 drivers/md/dm-crypt.c static u8 *org_iv_of_dmreq(struct crypt_config *cc, cc 932 drivers/md/dm-crypt.c return iv_of_dmreq(cc, dmreq) + cc->iv_size; cc 935 drivers/md/dm-crypt.c static __le64 *org_sector_of_dmreq(struct crypt_config *cc, cc 938 drivers/md/dm-crypt.c u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc->iv_size; cc 942 drivers/md/dm-crypt.c static unsigned int *org_tag_of_dmreq(struct crypt_config *cc, cc 945 drivers/md/dm-crypt.c u8 *ptr = iv_of_dmreq(cc, dmreq) + cc->iv_size + cc 946 drivers/md/dm-crypt.c cc->iv_size + sizeof(uint64_t); cc 950 drivers/md/dm-crypt.c static void *tag_from_dmreq(struct crypt_config *cc, cc 956 drivers/md/dm-crypt.c return &io->integrity_metadata[*org_tag_of_dmreq(cc, dmreq) * cc 957 drivers/md/dm-crypt.c cc->on_disk_tag_size]; cc 960 drivers/md/dm-crypt.c static void *iv_tag_from_dmreq(struct crypt_config *cc, cc 963 drivers/md/dm-crypt.c return tag_from_dmreq(cc, dmreq) + cc->integrity_tag_size; cc 966 drivers/md/dm-crypt.c static int crypt_convert_block_aead(struct crypt_config *cc, cc 978 drivers/md/dm-crypt.c BUG_ON(cc->integrity_iv_size && cc->integrity_iv_size != cc->iv_size); cc 981 drivers/md/dm-crypt.c if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) cc 984 drivers/md/dm-crypt.c dmreq = dmreq_of_req(cc, req); cc 986 drivers/md/dm-crypt.c if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) cc 987 drivers/md/dm-crypt.c dmreq->iv_sector >>= cc->sector_shift; cc 990 drivers/md/dm-crypt.c *org_tag_of_dmreq(cc, dmreq) = tag_offset; cc 992 drivers/md/dm-crypt.c sector = org_sector_of_dmreq(cc, dmreq); cc 993 drivers/md/dm-crypt.c *sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset); cc 995 drivers/md/dm-crypt.c iv = iv_of_dmreq(cc, dmreq); cc 996 drivers/md/dm-crypt.c org_iv = 
org_iv_of_dmreq(cc, dmreq); cc 997 drivers/md/dm-crypt.c tag = tag_from_dmreq(cc, dmreq); cc 998 drivers/md/dm-crypt.c tag_iv = iv_tag_from_dmreq(cc, dmreq); cc 1007 drivers/md/dm-crypt.c sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size); cc 1008 drivers/md/dm-crypt.c sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset); cc 1009 drivers/md/dm-crypt.c sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size); cc 1013 drivers/md/dm-crypt.c sg_set_buf(&dmreq->sg_out[1], org_iv, cc->iv_size); cc 1014 drivers/md/dm-crypt.c sg_set_page(&dmreq->sg_out[2], bv_out.bv_page, cc->sector_size, bv_out.bv_offset); cc 1015 drivers/md/dm-crypt.c sg_set_buf(&dmreq->sg_out[3], tag, cc->integrity_tag_size); cc 1017 drivers/md/dm-crypt.c if (cc->iv_gen_ops) { cc 1019 drivers/md/dm-crypt.c if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) { cc 1020 drivers/md/dm-crypt.c memcpy(org_iv, tag_iv, cc->iv_size); cc 1022 drivers/md/dm-crypt.c r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); cc 1026 drivers/md/dm-crypt.c if (cc->integrity_iv_size) cc 1027 drivers/md/dm-crypt.c memcpy(tag_iv, org_iv, cc->iv_size); cc 1030 drivers/md/dm-crypt.c memcpy(iv, org_iv, cc->iv_size); cc 1033 drivers/md/dm-crypt.c aead_request_set_ad(req, sizeof(uint64_t) + cc->iv_size); cc 1036 drivers/md/dm-crypt.c cc->sector_size, iv); cc 1038 drivers/md/dm-crypt.c if (cc->integrity_tag_size + cc->integrity_iv_size != cc->on_disk_tag_size) cc 1039 drivers/md/dm-crypt.c memset(tag + cc->integrity_tag_size + cc->integrity_iv_size, 0, cc 1040 drivers/md/dm-crypt.c cc->on_disk_tag_size - (cc->integrity_tag_size + cc->integrity_iv_size)); cc 1043 drivers/md/dm-crypt.c cc->sector_size + cc->integrity_tag_size, iv); cc 1053 drivers/md/dm-crypt.c if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) cc 1054 drivers/md/dm-crypt.c r = cc->iv_gen_ops->post(cc, org_iv, dmreq); cc 1056 drivers/md/dm-crypt.c bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); cc 1057 drivers/md/dm-crypt.c bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); cc 1062 drivers/md/dm-crypt.c static int crypt_convert_block_skcipher(struct crypt_config *cc, cc 1076 drivers/md/dm-crypt.c if (unlikely(bv_in.bv_len & (cc->sector_size - 1))) cc 1079 drivers/md/dm-crypt.c dmreq = dmreq_of_req(cc, req); cc 1081 drivers/md/dm-crypt.c if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) cc 1082 drivers/md/dm-crypt.c dmreq->iv_sector >>= cc->sector_shift; cc 1085 drivers/md/dm-crypt.c *org_tag_of_dmreq(cc, dmreq) = tag_offset; cc 1087 drivers/md/dm-crypt.c iv = iv_of_dmreq(cc, dmreq); cc 1088 drivers/md/dm-crypt.c org_iv = org_iv_of_dmreq(cc, dmreq); cc 1089 drivers/md/dm-crypt.c tag_iv = iv_tag_from_dmreq(cc, dmreq); cc 1091 drivers/md/dm-crypt.c sector = org_sector_of_dmreq(cc, dmreq); cc 1092 drivers/md/dm-crypt.c *sector = cpu_to_le64(ctx->cc_sector - cc->iv_offset); cc 1099 drivers/md/dm-crypt.c sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset); cc 1102 drivers/md/dm-crypt.c sg_set_page(sg_out, bv_out.bv_page, cc->sector_size, bv_out.bv_offset); cc 1104 drivers/md/dm-crypt.c if (cc->iv_gen_ops) { cc 1106 drivers/md/dm-crypt.c if (cc->integrity_iv_size && bio_data_dir(ctx->bio_in) != WRITE) { cc 1107 drivers/md/dm-crypt.c memcpy(org_iv, tag_iv, cc->integrity_iv_size); cc 1109 drivers/md/dm-crypt.c r = cc->iv_gen_ops->generator(cc, org_iv, dmreq); cc 1113 drivers/md/dm-crypt.c if (cc->integrity_iv_size) cc 1114 drivers/md/dm-crypt.c memcpy(tag_iv, org_iv, cc->integrity_iv_size); cc 1117 
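Note: the iv_of_dmreq()/org_iv_of_dmreq()/org_sector_of_dmreq()/org_tag_of_dmreq() helpers used by the block-conversion entries above carve one per-bio allocation into the crypto request, the dm_crypt_request, and then IV, original IV, original sector and tag-offset fields at fixed offsets, with the IV aligned to the cipher's alignment mask. The sketch below mimics that carving in plain C; struct names, sizes and the 16-byte alignment are placeholders rather than dm-crypt's real values.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define IV_SIZE   16
#define IV_ALIGN  16	/* placeholder for "cipher alignmask + 1" */

/* Round p up to the next multiple of a (a must be a power of two). */
static void *align_ptr(void *p, uintptr_t a)
{
	return (void *)(((uintptr_t)p + a - 1) & ~(a - 1));
}

struct fake_request { char opaque[64]; };	/* stands in for skcipher_request */
struct fake_dm_creq { uint64_t iv_sector; };	/* stands in for dm_crypt_request */

int main(void)
{
	size_t dmreq_start = sizeof(struct fake_request);	/* like cc->dmreq_start */
	/* one blob: request | dm request | iv | org_iv | sector | tag index */
	size_t blob_size = dmreq_start + sizeof(struct fake_dm_creq) +
			   IV_ALIGN + 2 * IV_SIZE + sizeof(uint64_t) +
			   sizeof(unsigned int);
	char *req = malloc(blob_size);

	if (!req)
		return 1;

	struct fake_dm_creq *dmreq = (void *)(req + dmreq_start);
	uint8_t *iv     = align_ptr(dmreq + 1, IV_ALIGN);
	uint8_t *org_iv = iv + IV_SIZE;
	uint64_t *sector = (void *)(iv + 2 * IV_SIZE);
	unsigned int *tag_index = (void *)(iv + 2 * IV_SIZE + sizeof(uint64_t));

	*sector = 12345;
	*tag_index = 0;
	org_iv[0] = 0;

	printf("dmreq at +%zu, iv at +%zu, sector at +%zu, tag index at +%zu\n",
	       (size_t)((char *)dmreq - req), (size_t)((char *)iv - req),
	       (size_t)((char *)sector - req), (size_t)((char *)tag_index - req));

	free(req);
	return 0;
}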
drivers/md/dm-crypt.c memcpy(iv, org_iv, cc->iv_size); cc 1120 drivers/md/dm-crypt.c skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv); cc 1127 drivers/md/dm-crypt.c if (!r && cc->iv_gen_ops && cc->iv_gen_ops->post) cc 1128 drivers/md/dm-crypt.c r = cc->iv_gen_ops->post(cc, org_iv, dmreq); cc 1130 drivers/md/dm-crypt.c bio_advance_iter(ctx->bio_in, &ctx->iter_in, cc->sector_size); cc 1131 drivers/md/dm-crypt.c bio_advance_iter(ctx->bio_out, &ctx->iter_out, cc->sector_size); cc 1139 drivers/md/dm-crypt.c static void crypt_alloc_req_skcipher(struct crypt_config *cc, cc 1142 drivers/md/dm-crypt.c unsigned key_index = ctx->cc_sector & (cc->tfms_count - 1); cc 1145 drivers/md/dm-crypt.c ctx->r.req = mempool_alloc(&cc->req_pool, GFP_NOIO); cc 1147 drivers/md/dm-crypt.c skcipher_request_set_tfm(ctx->r.req, cc->cipher_tfm.tfms[key_index]); cc 1155 drivers/md/dm-crypt.c kcryptd_async_done, dmreq_of_req(cc, ctx->r.req)); cc 1158 drivers/md/dm-crypt.c static void crypt_alloc_req_aead(struct crypt_config *cc, cc 1162 drivers/md/dm-crypt.c ctx->r.req_aead = mempool_alloc(&cc->req_pool, GFP_NOIO); cc 1164 drivers/md/dm-crypt.c aead_request_set_tfm(ctx->r.req_aead, cc->cipher_tfm.tfms_aead[0]); cc 1172 drivers/md/dm-crypt.c kcryptd_async_done, dmreq_of_req(cc, ctx->r.req_aead)); cc 1175 drivers/md/dm-crypt.c static void crypt_alloc_req(struct crypt_config *cc, cc 1178 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 1179 drivers/md/dm-crypt.c crypt_alloc_req_aead(cc, ctx); cc 1181 drivers/md/dm-crypt.c crypt_alloc_req_skcipher(cc, ctx); cc 1184 drivers/md/dm-crypt.c static void crypt_free_req_skcipher(struct crypt_config *cc, cc 1187 drivers/md/dm-crypt.c struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); cc 1190 drivers/md/dm-crypt.c mempool_free(req, &cc->req_pool); cc 1193 drivers/md/dm-crypt.c static void crypt_free_req_aead(struct crypt_config *cc, cc 1196 drivers/md/dm-crypt.c struct dm_crypt_io *io = dm_per_bio_data(base_bio, cc->per_bio_data_size); cc 1199 drivers/md/dm-crypt.c mempool_free(req, &cc->req_pool); cc 1202 drivers/md/dm-crypt.c static void crypt_free_req(struct crypt_config *cc, void *req, struct bio *base_bio) cc 1204 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 1205 drivers/md/dm-crypt.c crypt_free_req_aead(cc, req, base_bio); cc 1207 drivers/md/dm-crypt.c crypt_free_req_skcipher(cc, req, base_bio); cc 1213 drivers/md/dm-crypt.c static blk_status_t crypt_convert(struct crypt_config *cc, cc 1217 drivers/md/dm-crypt.c unsigned int sector_step = cc->sector_size >> SECTOR_SHIFT; cc 1224 drivers/md/dm-crypt.c crypt_alloc_req(cc, ctx); cc 1227 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 1228 drivers/md/dm-crypt.c r = crypt_convert_block_aead(cc, ctx, ctx->r.req_aead, tag_offset); cc 1230 drivers/md/dm-crypt.c r = crypt_convert_block_skcipher(cc, ctx, ctx->r.req, tag_offset); cc 1277 drivers/md/dm-crypt.c static void crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone); cc 1298 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1307 drivers/md/dm-crypt.c mutex_lock(&cc->bio_alloc_lock); cc 1309 drivers/md/dm-crypt.c clone = bio_alloc_bioset(GFP_NOIO, nr_iovecs, &cc->bs); cc 1318 drivers/md/dm-crypt.c page = mempool_alloc(&cc->page_pool, gfp_mask); cc 1320 drivers/md/dm-crypt.c crypt_free_buffer_pages(cc, clone); cc 1335 drivers/md/dm-crypt.c crypt_free_buffer_pages(cc, clone); cc 1341 drivers/md/dm-crypt.c mutex_unlock(&cc->bio_alloc_lock); cc 1346 drivers/md/dm-crypt.c static void 
crypt_free_buffer_pages(struct crypt_config *cc, struct bio *clone) cc 1353 drivers/md/dm-crypt.c mempool_free(bv->bv_page, &cc->page_pool); cc 1357 drivers/md/dm-crypt.c static void crypt_io_init(struct dm_crypt_io *io, struct crypt_config *cc, cc 1360 drivers/md/dm-crypt.c io->cc = cc; cc 1381 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1389 drivers/md/dm-crypt.c crypt_free_req(cc, io->ctx.r.req, base_bio); cc 1392 drivers/md/dm-crypt.c mempool_free(io->integrity_metadata, &io->cc->tag_pool); cc 1420 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1428 drivers/md/dm-crypt.c crypt_free_buffer_pages(cc, clone); cc 1446 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1450 drivers/md/dm-crypt.c bio_set_dev(clone, cc->dev->bdev); cc 1456 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1465 drivers/md/dm-crypt.c clone = bio_clone_fast(io->base_bio, gfp, &cc->bs); cc 1472 drivers/md/dm-crypt.c clone->bi_iter.bi_sector = cc->start + io->sector; cc 1496 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1499 drivers/md/dm-crypt.c queue_work(cc->io_queue, &io->work); cc 1513 drivers/md/dm-crypt.c struct crypt_config *cc = data; cc 1520 drivers/md/dm-crypt.c spin_lock_irq(&cc->write_thread_lock); cc 1523 drivers/md/dm-crypt.c if (!RB_EMPTY_ROOT(&cc->write_tree)) cc 1528 drivers/md/dm-crypt.c spin_unlock_irq(&cc->write_thread_lock); cc 1538 drivers/md/dm-crypt.c spin_lock_irq(&cc->write_thread_lock); cc 1542 drivers/md/dm-crypt.c write_tree = cc->write_tree; cc 1543 drivers/md/dm-crypt.c cc->write_tree = RB_ROOT; cc 1544 drivers/md/dm-crypt.c spin_unlock_irq(&cc->write_thread_lock); cc 1566 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1572 drivers/md/dm-crypt.c crypt_free_buffer_pages(cc, clone); cc 1581 drivers/md/dm-crypt.c clone->bi_iter.bi_sector = cc->start + io->sector; cc 1583 drivers/md/dm-crypt.c if (likely(!async) && test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) { cc 1588 drivers/md/dm-crypt.c spin_lock_irqsave(&cc->write_thread_lock, flags); cc 1589 drivers/md/dm-crypt.c if (RB_EMPTY_ROOT(&cc->write_tree)) cc 1590 drivers/md/dm-crypt.c wake_up_process(cc->write_thread); cc 1591 drivers/md/dm-crypt.c rbp = &cc->write_tree.rb_node; cc 1602 drivers/md/dm-crypt.c rb_insert_color(&io->rb_node, &cc->write_tree); cc 1603 drivers/md/dm-crypt.c spin_unlock_irqrestore(&cc->write_thread_lock, flags); cc 1608 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1618 drivers/md/dm-crypt.c crypt_convert_init(cc, &io->ctx, NULL, io->base_bio, sector); cc 1632 drivers/md/dm-crypt.c r = crypt_convert(cc, &io->ctx); cc 1654 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1659 drivers/md/dm-crypt.c crypt_convert_init(cc, &io->ctx, io->base_bio, io->base_bio, cc 1662 drivers/md/dm-crypt.c r = crypt_convert(cc, &io->ctx); cc 1678 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1690 drivers/md/dm-crypt.c if (!error && cc->iv_gen_ops && cc->iv_gen_ops->post) cc 1691 drivers/md/dm-crypt.c error = cc->iv_gen_ops->post(cc, org_iv_of_dmreq(cc, dmreq), dmreq); cc 1696 drivers/md/dm-crypt.c (unsigned long long)le64_to_cpu(*org_sector_of_dmreq(cc, dmreq))); cc 1701 drivers/md/dm-crypt.c crypt_free_req(cc, req_of_dmreq(cc, dmreq), io->base_bio); cc 1724 drivers/md/dm-crypt.c struct crypt_config *cc = io->cc; cc 1727 drivers/md/dm-crypt.c queue_work(cc->crypt_queue, &io->work); cc 1730 drivers/md/dm-crypt.c static void crypt_free_tfms_aead(struct crypt_config *cc) cc 1732 drivers/md/dm-crypt.c if (!cc->cipher_tfm.tfms_aead) cc 
1735 drivers/md/dm-crypt.c if (cc->cipher_tfm.tfms_aead[0] && !IS_ERR(cc->cipher_tfm.tfms_aead[0])) { cc 1736 drivers/md/dm-crypt.c crypto_free_aead(cc->cipher_tfm.tfms_aead[0]); cc 1737 drivers/md/dm-crypt.c cc->cipher_tfm.tfms_aead[0] = NULL; cc 1740 drivers/md/dm-crypt.c kfree(cc->cipher_tfm.tfms_aead); cc 1741 drivers/md/dm-crypt.c cc->cipher_tfm.tfms_aead = NULL; cc 1744 drivers/md/dm-crypt.c static void crypt_free_tfms_skcipher(struct crypt_config *cc) cc 1748 drivers/md/dm-crypt.c if (!cc->cipher_tfm.tfms) cc 1751 drivers/md/dm-crypt.c for (i = 0; i < cc->tfms_count; i++) cc 1752 drivers/md/dm-crypt.c if (cc->cipher_tfm.tfms[i] && !IS_ERR(cc->cipher_tfm.tfms[i])) { cc 1753 drivers/md/dm-crypt.c crypto_free_skcipher(cc->cipher_tfm.tfms[i]); cc 1754 drivers/md/dm-crypt.c cc->cipher_tfm.tfms[i] = NULL; cc 1757 drivers/md/dm-crypt.c kfree(cc->cipher_tfm.tfms); cc 1758 drivers/md/dm-crypt.c cc->cipher_tfm.tfms = NULL; cc 1761 drivers/md/dm-crypt.c static void crypt_free_tfms(struct crypt_config *cc) cc 1763 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 1764 drivers/md/dm-crypt.c crypt_free_tfms_aead(cc); cc 1766 drivers/md/dm-crypt.c crypt_free_tfms_skcipher(cc); cc 1769 drivers/md/dm-crypt.c static int crypt_alloc_tfms_skcipher(struct crypt_config *cc, char *ciphermode) cc 1774 drivers/md/dm-crypt.c cc->cipher_tfm.tfms = kcalloc(cc->tfms_count, cc 1777 drivers/md/dm-crypt.c if (!cc->cipher_tfm.tfms) cc 1780 drivers/md/dm-crypt.c for (i = 0; i < cc->tfms_count; i++) { cc 1781 drivers/md/dm-crypt.c cc->cipher_tfm.tfms[i] = crypto_alloc_skcipher(ciphermode, 0, 0); cc 1782 drivers/md/dm-crypt.c if (IS_ERR(cc->cipher_tfm.tfms[i])) { cc 1783 drivers/md/dm-crypt.c err = PTR_ERR(cc->cipher_tfm.tfms[i]); cc 1784 drivers/md/dm-crypt.c crypt_free_tfms(cc); cc 1795 drivers/md/dm-crypt.c crypto_skcipher_alg(any_tfm(cc))->base.cra_driver_name); cc 1799 drivers/md/dm-crypt.c static int crypt_alloc_tfms_aead(struct crypt_config *cc, char *ciphermode) cc 1803 drivers/md/dm-crypt.c cc->cipher_tfm.tfms = kmalloc(sizeof(struct crypto_aead *), GFP_KERNEL); cc 1804 drivers/md/dm-crypt.c if (!cc->cipher_tfm.tfms) cc 1807 drivers/md/dm-crypt.c cc->cipher_tfm.tfms_aead[0] = crypto_alloc_aead(ciphermode, 0, 0); cc 1808 drivers/md/dm-crypt.c if (IS_ERR(cc->cipher_tfm.tfms_aead[0])) { cc 1809 drivers/md/dm-crypt.c err = PTR_ERR(cc->cipher_tfm.tfms_aead[0]); cc 1810 drivers/md/dm-crypt.c crypt_free_tfms(cc); cc 1815 drivers/md/dm-crypt.c crypto_aead_alg(any_tfm_aead(cc))->base.cra_driver_name); cc 1819 drivers/md/dm-crypt.c static int crypt_alloc_tfms(struct crypt_config *cc, char *ciphermode) cc 1821 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 1822 drivers/md/dm-crypt.c return crypt_alloc_tfms_aead(cc, ciphermode); cc 1824 drivers/md/dm-crypt.c return crypt_alloc_tfms_skcipher(cc, ciphermode); cc 1827 drivers/md/dm-crypt.c static unsigned crypt_subkey_size(struct crypt_config *cc) cc 1829 drivers/md/dm-crypt.c return (cc->key_size - cc->key_extra_size) >> ilog2(cc->tfms_count); cc 1832 drivers/md/dm-crypt.c static unsigned crypt_authenckey_size(struct crypt_config *cc) cc 1834 drivers/md/dm-crypt.c return crypt_subkey_size(cc) + RTA_SPACE(sizeof(struct crypto_authenc_key_param)); cc 1859 drivers/md/dm-crypt.c static int crypt_setkey(struct crypt_config *cc) cc 1865 drivers/md/dm-crypt.c subkey_size = crypt_subkey_size(cc); cc 1867 drivers/md/dm-crypt.c if (crypt_integrity_hmac(cc)) { cc 1868 drivers/md/dm-crypt.c if (subkey_size < cc->key_mac_size) cc 1871 drivers/md/dm-crypt.c 
crypt_copy_authenckey(cc->authenc_key, cc->key, cc 1872 drivers/md/dm-crypt.c subkey_size - cc->key_mac_size, cc 1873 drivers/md/dm-crypt.c cc->key_mac_size); cc 1876 drivers/md/dm-crypt.c for (i = 0; i < cc->tfms_count; i++) { cc 1877 drivers/md/dm-crypt.c if (crypt_integrity_hmac(cc)) cc 1878 drivers/md/dm-crypt.c r = crypto_aead_setkey(cc->cipher_tfm.tfms_aead[i], cc 1879 drivers/md/dm-crypt.c cc->authenc_key, crypt_authenckey_size(cc)); cc 1880 drivers/md/dm-crypt.c else if (crypt_integrity_aead(cc)) cc 1881 drivers/md/dm-crypt.c r = crypto_aead_setkey(cc->cipher_tfm.tfms_aead[i], cc 1882 drivers/md/dm-crypt.c cc->key + (i * subkey_size), cc 1885 drivers/md/dm-crypt.c r = crypto_skcipher_setkey(cc->cipher_tfm.tfms[i], cc 1886 drivers/md/dm-crypt.c cc->key + (i * subkey_size), cc 1892 drivers/md/dm-crypt.c if (crypt_integrity_hmac(cc)) cc 1893 drivers/md/dm-crypt.c memzero_explicit(cc->authenc_key, crypt_authenckey_size(cc)); cc 1908 drivers/md/dm-crypt.c static int crypt_set_keyring_key(struct crypt_config *cc, const char *key_string) cc 1954 drivers/md/dm-crypt.c if (cc->key_size != ukp->datalen) { cc 1961 drivers/md/dm-crypt.c memcpy(cc->key, ukp->data, cc->key_size); cc 1967 drivers/md/dm-crypt.c clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); cc 1969 drivers/md/dm-crypt.c ret = crypt_setkey(cc); cc 1972 drivers/md/dm-crypt.c set_bit(DM_CRYPT_KEY_VALID, &cc->flags); cc 1973 drivers/md/dm-crypt.c kzfree(cc->key_string); cc 1974 drivers/md/dm-crypt.c cc->key_string = new_key_string; cc 2006 drivers/md/dm-crypt.c static int crypt_set_keyring_key(struct crypt_config *cc, const char *key_string) cc 2018 drivers/md/dm-crypt.c static int crypt_set_key(struct crypt_config *cc, char *key) cc 2024 drivers/md/dm-crypt.c if (!cc->key_size && strcmp(key, "-")) cc 2029 drivers/md/dm-crypt.c r = crypt_set_keyring_key(cc, key + 1); cc 2034 drivers/md/dm-crypt.c clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); cc 2037 drivers/md/dm-crypt.c kzfree(cc->key_string); cc 2038 drivers/md/dm-crypt.c cc->key_string = NULL; cc 2041 drivers/md/dm-crypt.c if (cc->key_size && hex2bin(cc->key, key, cc->key_size) < 0) cc 2044 drivers/md/dm-crypt.c r = crypt_setkey(cc); cc 2046 drivers/md/dm-crypt.c set_bit(DM_CRYPT_KEY_VALID, &cc->flags); cc 2055 drivers/md/dm-crypt.c static int crypt_wipe_key(struct crypt_config *cc) cc 2059 drivers/md/dm-crypt.c clear_bit(DM_CRYPT_KEY_VALID, &cc->flags); cc 2060 drivers/md/dm-crypt.c get_random_bytes(&cc->key, cc->key_size); cc 2063 drivers/md/dm-crypt.c if (cc->iv_gen_ops && cc->iv_gen_ops->wipe) { cc 2064 drivers/md/dm-crypt.c r = cc->iv_gen_ops->wipe(cc); cc 2069 drivers/md/dm-crypt.c kzfree(cc->key_string); cc 2070 drivers/md/dm-crypt.c cc->key_string = NULL; cc 2071 drivers/md/dm-crypt.c r = crypt_setkey(cc); cc 2072 drivers/md/dm-crypt.c memset(&cc->key, 0, cc->key_size * sizeof(u8)); cc 2092 drivers/md/dm-crypt.c struct crypt_config *cc = pool_data; cc 2095 drivers/md/dm-crypt.c if (unlikely(percpu_counter_compare(&cc->n_allocated_pages, dm_crypt_pages_per_client) >= 0) && cc 2101 drivers/md/dm-crypt.c percpu_counter_add(&cc->n_allocated_pages, 1); cc 2108 drivers/md/dm-crypt.c struct crypt_config *cc = pool_data; cc 2111 drivers/md/dm-crypt.c percpu_counter_sub(&cc->n_allocated_pages, 1); cc 2116 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2120 drivers/md/dm-crypt.c if (!cc) cc 2123 drivers/md/dm-crypt.c if (cc->write_thread) cc 2124 drivers/md/dm-crypt.c kthread_stop(cc->write_thread); cc 2126 drivers/md/dm-crypt.c if (cc->io_queue) cc 2127 
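Note: crypt_subkey_size()/crypt_setkey() above split one supplied key into tfms_count equal subkeys (after reserving key_extra_size bytes of IV seed material) and program each cipher context with its own slice at key + i * subkey_size. The sketch below shows that slicing with the cipher contexts reduced to plain structs; TFMS_COUNT, KEY_EXTRA and the key length are arbitrary example values.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define TFMS_COUNT	4	/* number of cipher contexts (power of two)  */
#define KEY_EXTRA	8	/* bytes reserved for IV seed material       */

struct fake_tfm {
	uint8_t key[64];
	size_t  keylen;
};

/* Program each context with its slice of the combined key, the way
 * crypt_setkey() walks key + i * subkey_size. */
static int set_keys(struct fake_tfm tfms[TFMS_COUNT],
		    const uint8_t *key, size_t key_size)
{
	size_t subkey_size = (key_size - KEY_EXTRA) / TFMS_COUNT;
	size_t i;

	if (subkey_size == 0 || subkey_size > sizeof(tfms[0].key))
		return -1;

	for (i = 0; i < TFMS_COUNT; i++) {
		memcpy(tfms[i].key, key + i * subkey_size, subkey_size);
		tfms[i].keylen = subkey_size;
	}
	/* the trailing KEY_EXTRA bytes (key + TFMS_COUNT * subkey_size)
	 * would seed the IV generator, as in crypt_iv_lmk_init() */
	return 0;
}

int main(void)
{
	uint8_t key[4 * 16 + KEY_EXTRA];
	struct fake_tfm tfms[TFMS_COUNT];
	size_t i;

	for (i = 0; i < sizeof(key); i++)
		key[i] = (uint8_t)i;

	if (set_keys(tfms, key, sizeof(key)))
		return 1;

	for (i = 0; i < TFMS_COUNT; i++)
		printf("tfm %zu: first key byte %u, len %zu\n",
		       i, tfms[i].key[0], tfms[i].keylen);
	return 0;
}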
drivers/md/dm-crypt.c destroy_workqueue(cc->io_queue); cc 2128 drivers/md/dm-crypt.c if (cc->crypt_queue) cc 2129 drivers/md/dm-crypt.c destroy_workqueue(cc->crypt_queue); cc 2131 drivers/md/dm-crypt.c crypt_free_tfms(cc); cc 2133 drivers/md/dm-crypt.c bioset_exit(&cc->bs); cc 2135 drivers/md/dm-crypt.c mempool_exit(&cc->page_pool); cc 2136 drivers/md/dm-crypt.c mempool_exit(&cc->req_pool); cc 2137 drivers/md/dm-crypt.c mempool_exit(&cc->tag_pool); cc 2139 drivers/md/dm-crypt.c WARN_ON(percpu_counter_sum(&cc->n_allocated_pages) != 0); cc 2140 drivers/md/dm-crypt.c percpu_counter_destroy(&cc->n_allocated_pages); cc 2142 drivers/md/dm-crypt.c if (cc->iv_gen_ops && cc->iv_gen_ops->dtr) cc 2143 drivers/md/dm-crypt.c cc->iv_gen_ops->dtr(cc); cc 2145 drivers/md/dm-crypt.c if (cc->dev) cc 2146 drivers/md/dm-crypt.c dm_put_device(ti, cc->dev); cc 2148 drivers/md/dm-crypt.c kzfree(cc->cipher_string); cc 2149 drivers/md/dm-crypt.c kzfree(cc->key_string); cc 2150 drivers/md/dm-crypt.c kzfree(cc->cipher_auth); cc 2151 drivers/md/dm-crypt.c kzfree(cc->authenc_key); cc 2153 drivers/md/dm-crypt.c mutex_destroy(&cc->bio_alloc_lock); cc 2156 drivers/md/dm-crypt.c kzfree(cc); cc 2167 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2169 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 2170 drivers/md/dm-crypt.c cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); cc 2172 drivers/md/dm-crypt.c cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); cc 2174 drivers/md/dm-crypt.c if (cc->iv_size) cc 2176 drivers/md/dm-crypt.c cc->iv_size = max(cc->iv_size, cc 2185 drivers/md/dm-crypt.c cc->iv_gen_ops = NULL; cc 2187 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_plain_ops; cc 2189 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_plain64_ops; cc 2191 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_plain64be_ops; cc 2193 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_essiv_ops; cc 2195 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_benbi_ops; cc 2197 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_null_ops; cc 2199 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_eboiv_ops; cc 2201 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_lmk_ops; cc 2208 drivers/md/dm-crypt.c if (cc->key_size % cc->key_parts) { cc 2209 drivers/md/dm-crypt.c cc->key_parts++; cc 2210 drivers/md/dm-crypt.c cc->key_extra_size = cc->key_size / cc->key_parts; cc 2213 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_tcw_ops; cc 2214 drivers/md/dm-crypt.c cc->key_parts += 2; /* IV + whitening */ cc 2215 drivers/md/dm-crypt.c cc->key_extra_size = cc->iv_size + TCW_WHITENING_SIZE; cc 2217 drivers/md/dm-crypt.c cc->iv_gen_ops = &crypt_iv_random_ops; cc 2219 drivers/md/dm-crypt.c cc->integrity_iv_size = cc->iv_size; cc 2233 drivers/md/dm-crypt.c static int crypt_ctr_auth_cipher(struct crypt_config *cc, char *cipher_api) cc 2257 drivers/md/dm-crypt.c cc->key_mac_size = crypto_ahash_digestsize(mac); cc 2260 drivers/md/dm-crypt.c cc->authenc_key = kmalloc(crypt_authenckey_size(cc), GFP_KERNEL); cc 2261 drivers/md/dm-crypt.c if (!cc->authenc_key) cc 2270 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2274 drivers/md/dm-crypt.c cc->tfms_count = 1; cc 2298 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) { cc 2299 drivers/md/dm-crypt.c ret = crypt_ctr_auth_cipher(cc, cipher_api); cc 2307 drivers/md/dm-crypt.c cc->tfms_count = 64; cc 2323 drivers/md/dm-crypt.c cc->key_parts = cc->tfms_count; cc 2326 drivers/md/dm-crypt.c ret = crypt_alloc_tfms(cc, cipher_api); cc 2332 drivers/md/dm-crypt.c if 
(crypt_integrity_aead(cc)) cc 2333 drivers/md/dm-crypt.c cc->iv_size = crypto_aead_ivsize(any_tfm_aead(cc)); cc 2335 drivers/md/dm-crypt.c cc->iv_size = crypto_skcipher_ivsize(any_tfm(cc)); cc 2343 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2349 drivers/md/dm-crypt.c if (strchr(cipher_in, '(') || crypt_integrity_aead(cc)) { cc 2363 drivers/md/dm-crypt.c cc->tfms_count = 1; cc 2364 drivers/md/dm-crypt.c else if (sscanf(keycount, "%u%c", &cc->tfms_count, &dummy) != 1 || cc 2365 drivers/md/dm-crypt.c !is_power_of_2(cc->tfms_count)) { cc 2369 drivers/md/dm-crypt.c cc->key_parts = cc->tfms_count; cc 2411 drivers/md/dm-crypt.c ret = crypt_alloc_tfms(cc, cipher_api); cc 2427 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2431 drivers/md/dm-crypt.c cc->cipher_string = kstrdup(cipher_in, GFP_KERNEL); cc 2432 drivers/md/dm-crypt.c if (!cc->cipher_string) { cc 2450 drivers/md/dm-crypt.c ret = crypt_set_key(cc, key); cc 2457 drivers/md/dm-crypt.c if (cc->iv_gen_ops && cc->iv_gen_ops->ctr) { cc 2458 drivers/md/dm-crypt.c ret = cc->iv_gen_ops->ctr(cc, ti, ivopts); cc 2466 drivers/md/dm-crypt.c if (cc->iv_gen_ops && cc->iv_gen_ops->init) { cc 2467 drivers/md/dm-crypt.c ret = cc->iv_gen_ops->init(cc); cc 2475 drivers/md/dm-crypt.c if (cc->key_string) cc 2476 drivers/md/dm-crypt.c memset(cc->key, 0, cc->key_size * sizeof(u8)); cc 2483 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2512 drivers/md/dm-crypt.c set_bit(DM_CRYPT_SAME_CPU, &cc->flags); cc 2515 drivers/md/dm-crypt.c set_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); cc 2521 drivers/md/dm-crypt.c cc->on_disk_tag_size = val; cc 2524 drivers/md/dm-crypt.c set_bit(CRYPT_MODE_INTEGRITY_AEAD, &cc->cipher_flags); cc 2530 drivers/md/dm-crypt.c cc->cipher_auth = kstrdup(sval, GFP_KERNEL); cc 2531 drivers/md/dm-crypt.c if (!cc->cipher_auth) cc 2533 drivers/md/dm-crypt.c } else if (sscanf(opt_string, "sector_size:%hu%c", &cc->sector_size, &dummy) == 1) { cc 2534 drivers/md/dm-crypt.c if (cc->sector_size < (1 << SECTOR_SHIFT) || cc 2535 drivers/md/dm-crypt.c cc->sector_size > 4096 || cc 2536 drivers/md/dm-crypt.c (cc->sector_size & (cc->sector_size - 1))) { cc 2540 drivers/md/dm-crypt.c if (ti->len & ((cc->sector_size >> SECTOR_SHIFT) - 1)) { cc 2544 drivers/md/dm-crypt.c cc->sector_shift = __ffs(cc->sector_size) - SECTOR_SHIFT; cc 2546 drivers/md/dm-crypt.c set_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags); cc 2562 drivers/md/dm-crypt.c struct crypt_config *cc; cc 2582 drivers/md/dm-crypt.c cc = kzalloc(struct_size(cc, key, key_size), GFP_KERNEL); cc 2583 drivers/md/dm-crypt.c if (!cc) { cc 2587 drivers/md/dm-crypt.c cc->key_size = key_size; cc 2588 drivers/md/dm-crypt.c cc->sector_size = (1 << SECTOR_SHIFT); cc 2589 drivers/md/dm-crypt.c cc->sector_shift = 0; cc 2591 drivers/md/dm-crypt.c ti->private = cc; cc 2598 drivers/md/dm-crypt.c ret = percpu_counter_init(&cc->n_allocated_pages, 0, GFP_KERNEL); cc 2613 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) { cc 2614 drivers/md/dm-crypt.c cc->dmreq_start = sizeof(struct aead_request); cc 2615 drivers/md/dm-crypt.c cc->dmreq_start += crypto_aead_reqsize(any_tfm_aead(cc)); cc 2616 drivers/md/dm-crypt.c align_mask = crypto_aead_alignmask(any_tfm_aead(cc)); cc 2618 drivers/md/dm-crypt.c cc->dmreq_start = sizeof(struct skcipher_request); cc 2619 drivers/md/dm-crypt.c cc->dmreq_start += crypto_skcipher_reqsize(any_tfm(cc)); cc 2620 drivers/md/dm-crypt.c align_mask = crypto_skcipher_alignmask(any_tfm(cc)); cc 2622 drivers/md/dm-crypt.c cc->dmreq_start = 
ALIGN(cc->dmreq_start, __alignof__(struct dm_crypt_request)); cc 2626 drivers/md/dm-crypt.c iv_size_padding = -(cc->dmreq_start + sizeof(struct dm_crypt_request)) cc 2639 drivers/md/dm-crypt.c iv_size_padding + cc->iv_size + cc 2640 drivers/md/dm-crypt.c cc->iv_size + cc 2644 drivers/md/dm-crypt.c ret = mempool_init_kmalloc_pool(&cc->req_pool, MIN_IOS, cc->dmreq_start + additional_req_size); cc 2650 drivers/md/dm-crypt.c cc->per_bio_data_size = ti->per_io_data_size = cc 2651 drivers/md/dm-crypt.c ALIGN(sizeof(struct dm_crypt_io) + cc->dmreq_start + additional_req_size, cc 2654 drivers/md/dm-crypt.c ret = mempool_init(&cc->page_pool, BIO_MAX_PAGES, crypt_page_alloc, crypt_page_free, cc); cc 2660 drivers/md/dm-crypt.c ret = bioset_init(&cc->bs, MIN_IOS, 0, BIOSET_NEED_BVECS); cc 2666 drivers/md/dm-crypt.c mutex_init(&cc->bio_alloc_lock); cc 2670 drivers/md/dm-crypt.c (tmpll & ((cc->sector_size >> SECTOR_SHIFT) - 1))) { cc 2674 drivers/md/dm-crypt.c cc->iv_offset = tmpll; cc 2676 drivers/md/dm-crypt.c ret = dm_get_device(ti, argv[3], dm_table_get_mode(ti->table), &cc->dev); cc 2687 drivers/md/dm-crypt.c cc->start = tmpll; cc 2689 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc) || cc->integrity_iv_size) { cc 2690 drivers/md/dm-crypt.c ret = crypt_integrity_ctr(cc, ti); cc 2694 drivers/md/dm-crypt.c cc->tag_pool_max_sectors = POOL_ENTRY_SIZE / cc->on_disk_tag_size; cc 2695 drivers/md/dm-crypt.c if (!cc->tag_pool_max_sectors) cc 2696 drivers/md/dm-crypt.c cc->tag_pool_max_sectors = 1; cc 2698 drivers/md/dm-crypt.c ret = mempool_init_kmalloc_pool(&cc->tag_pool, MIN_IOS, cc 2699 drivers/md/dm-crypt.c cc->tag_pool_max_sectors * cc->on_disk_tag_size); cc 2705 drivers/md/dm-crypt.c cc->tag_pool_max_sectors <<= cc->sector_shift; cc 2709 drivers/md/dm-crypt.c cc->io_queue = alloc_workqueue("kcryptd_io/%s", WQ_MEM_RECLAIM, 1, devname); cc 2710 drivers/md/dm-crypt.c if (!cc->io_queue) { cc 2715 drivers/md/dm-crypt.c if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) cc 2716 drivers/md/dm-crypt.c cc->crypt_queue = alloc_workqueue("kcryptd/%s", WQ_CPU_INTENSIVE | WQ_MEM_RECLAIM, cc 2719 drivers/md/dm-crypt.c cc->crypt_queue = alloc_workqueue("kcryptd/%s", cc 2722 drivers/md/dm-crypt.c if (!cc->crypt_queue) { cc 2727 drivers/md/dm-crypt.c spin_lock_init(&cc->write_thread_lock); cc 2728 drivers/md/dm-crypt.c cc->write_tree = RB_ROOT; cc 2730 drivers/md/dm-crypt.c cc->write_thread = kthread_create(dmcrypt_write, cc, "dmcrypt_write/%s", devname); cc 2731 drivers/md/dm-crypt.c if (IS_ERR(cc->write_thread)) { cc 2732 drivers/md/dm-crypt.c ret = PTR_ERR(cc->write_thread); cc 2733 drivers/md/dm-crypt.c cc->write_thread = NULL; cc 2737 drivers/md/dm-crypt.c wake_up_process(cc->write_thread); cc 2751 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2760 drivers/md/dm-crypt.c bio_set_dev(bio, cc->dev->bdev); cc 2762 drivers/md/dm-crypt.c bio->bi_iter.bi_sector = cc->start + cc 2771 drivers/md/dm-crypt.c (bio_data_dir(bio) == WRITE || cc->on_disk_tag_size)) cc 2778 drivers/md/dm-crypt.c if (unlikely((bio->bi_iter.bi_sector & ((cc->sector_size >> SECTOR_SHIFT) - 1)) != 0)) cc 2781 drivers/md/dm-crypt.c if (unlikely(bio->bi_iter.bi_size & (cc->sector_size - 1))) cc 2784 drivers/md/dm-crypt.c io = dm_per_bio_data(bio, cc->per_bio_data_size); cc 2785 drivers/md/dm-crypt.c crypt_io_init(io, cc, bio, dm_target_offset(ti, bio->bi_iter.bi_sector)); cc 2787 drivers/md/dm-crypt.c if (cc->on_disk_tag_size) { cc 2788 drivers/md/dm-crypt.c unsigned tag_len = cc->on_disk_tag_size * (bio_sectors(bio) >> 
cc->sector_shift); cc 2793 drivers/md/dm-crypt.c if (bio_sectors(bio) > cc->tag_pool_max_sectors) cc 2794 drivers/md/dm-crypt.c dm_accept_partial_bio(bio, cc->tag_pool_max_sectors); cc 2795 drivers/md/dm-crypt.c io->integrity_metadata = mempool_alloc(&cc->tag_pool, GFP_NOIO); cc 2800 drivers/md/dm-crypt.c if (crypt_integrity_aead(cc)) cc 2817 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2827 drivers/md/dm-crypt.c DMEMIT("%s ", cc->cipher_string); cc 2829 drivers/md/dm-crypt.c if (cc->key_size > 0) { cc 2830 drivers/md/dm-crypt.c if (cc->key_string) cc 2831 drivers/md/dm-crypt.c DMEMIT(":%u:%s", cc->key_size, cc->key_string); cc 2833 drivers/md/dm-crypt.c for (i = 0; i < cc->key_size; i++) cc 2834 drivers/md/dm-crypt.c DMEMIT("%02x", cc->key[i]); cc 2838 drivers/md/dm-crypt.c DMEMIT(" %llu %s %llu", (unsigned long long)cc->iv_offset, cc 2839 drivers/md/dm-crypt.c cc->dev->name, (unsigned long long)cc->start); cc 2842 drivers/md/dm-crypt.c num_feature_args += test_bit(DM_CRYPT_SAME_CPU, &cc->flags); cc 2843 drivers/md/dm-crypt.c num_feature_args += test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags); cc 2844 drivers/md/dm-crypt.c num_feature_args += cc->sector_size != (1 << SECTOR_SHIFT); cc 2845 drivers/md/dm-crypt.c num_feature_args += test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags); cc 2846 drivers/md/dm-crypt.c if (cc->on_disk_tag_size) cc 2852 drivers/md/dm-crypt.c if (test_bit(DM_CRYPT_SAME_CPU, &cc->flags)) cc 2854 drivers/md/dm-crypt.c if (test_bit(DM_CRYPT_NO_OFFLOAD, &cc->flags)) cc 2856 drivers/md/dm-crypt.c if (cc->on_disk_tag_size) cc 2857 drivers/md/dm-crypt.c DMEMIT(" integrity:%u:%s", cc->on_disk_tag_size, cc->cipher_auth); cc 2858 drivers/md/dm-crypt.c if (cc->sector_size != (1 << SECTOR_SHIFT)) cc 2859 drivers/md/dm-crypt.c DMEMIT(" sector_size:%d", cc->sector_size); cc 2860 drivers/md/dm-crypt.c if (test_bit(CRYPT_IV_LARGE_SECTORS, &cc->cipher_flags)) cc 2870 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2872 drivers/md/dm-crypt.c set_bit(DM_CRYPT_SUSPENDED, &cc->flags); cc 2877 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2879 drivers/md/dm-crypt.c if (!test_bit(DM_CRYPT_KEY_VALID, &cc->flags)) { cc 2889 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2891 drivers/md/dm-crypt.c clear_bit(DM_CRYPT_SUSPENDED, &cc->flags); cc 2901 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2908 drivers/md/dm-crypt.c if (!test_bit(DM_CRYPT_SUSPENDED, &cc->flags)) { cc 2915 drivers/md/dm-crypt.c if (key_size < 0 || cc->key_size != key_size) { cc 2920 drivers/md/dm-crypt.c ret = crypt_set_key(cc, argv[2]); cc 2923 drivers/md/dm-crypt.c if (cc->iv_gen_ops && cc->iv_gen_ops->init) cc 2924 drivers/md/dm-crypt.c ret = cc->iv_gen_ops->init(cc); cc 2926 drivers/md/dm-crypt.c if (cc->key_string) cc 2927 drivers/md/dm-crypt.c memset(cc->key, 0, cc->key_size * sizeof(u8)); cc 2931 drivers/md/dm-crypt.c return crypt_wipe_key(cc); cc 2942 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2944 drivers/md/dm-crypt.c return fn(ti, cc->dev, cc->start, ti->len, data); cc 2949 drivers/md/dm-crypt.c struct crypt_config *cc = ti->private; cc 2960 drivers/md/dm-crypt.c max_t(unsigned short, limits->logical_block_size, cc->sector_size); cc 2962 drivers/md/dm-crypt.c max_t(unsigned, limits->physical_block_size, cc->sector_size); cc 2963 drivers/md/dm-crypt.c limits->io_min = max_t(unsigned, limits->io_min, cc->sector_size); cc 119 drivers/media/dvb-core/dvb_demux.c u8 cc; cc 126 drivers/media/dvb-core/dvb_demux.c cc 
= buf[3] & 0x0f; cc 127 drivers/media/dvb-core/dvb_demux.c ccok = ((feed->cc + 1) & 0x0f) == cc; cc 128 drivers/media/dvb-core/dvb_demux.c feed->cc = cc; cc 132 drivers/media/dvb-core/dvb_demux.c cc, (feed->cc + 1) & 0x0f); cc 302 drivers/media/dvb-core/dvb_demux.c u8 cc; cc 311 drivers/media/dvb-core/dvb_demux.c cc = buf[3] & 0x0f; cc 312 drivers/media/dvb-core/dvb_demux.c ccok = ((feed->cc + 1) & 0x0f) == cc; cc 313 drivers/media/dvb-core/dvb_demux.c feed->cc = cc; cc 326 drivers/media/dvb-core/dvb_demux.c cc); cc 331 drivers/media/dvb-core/dvb_demux.c cc, (feed->cc + 1) & 0x0f, count + 4); cc 126 drivers/media/dvb-frontends/ix2505v.c u8 gain, cc, ref, psc, local_osc, lpf; cc 140 drivers/media/dvb-frontends/ix2505v.c cc = state->config->tuner_chargepump; cc 142 drivers/media/dvb-frontends/ix2505v.c cc = 0x3; cc 154 drivers/media/dvb-frontends/ix2505v.c data[2] = 0x81 | ((cc & 0x3) << 5) ; /*PD5,PD4 & TM = 0|C1,C0|REF=1*/ cc 372 drivers/media/i2c/saa7127.c u16 cc = data->data[1] << 8 | data->data[0]; cc 387 drivers/media/i2c/saa7127.c v4l2_dbg(2, debug, sd, "CC data: %04x\n", cc); cc 388 drivers/media/i2c/saa7127.c saa7127_write(sd, SAA7127_REG_LINE_21_ODD_0, cc & 0xff); cc 389 drivers/media/i2c/saa7127.c saa7127_write(sd, SAA7127_REG_LINE_21_ODD_1, cc >> 8); cc 390 drivers/media/i2c/saa7127.c state->cc_data = cc; cc 611 drivers/media/pci/cx88/cx88-video.c const struct cx88_ctrl *cc = ctrl->priv; cc 614 drivers/media/pci/cx88/cx88-video.c mask = cc->mask; cc 619 drivers/media/pci/cx88/cx88-video.c value = ((ctrl->val - cc->off) << cc->shift) & cc->mask; cc 637 drivers/media/pci/cx88/cx88-video.c value = ((ctrl->val - cc->off) << cc->shift) & cc->mask; cc 640 drivers/media/pci/cx88/cx88-video.c value = ((ctrl->val - cc->off) << cc->shift) & cc->mask; cc 645 drivers/media/pci/cx88/cx88-video.c ctrl->id, ctrl->name, ctrl->val, cc->reg, value, cc 646 drivers/media/pci/cx88/cx88-video.c mask, cc->sreg ? " [shadowed]" : ""); cc 647 drivers/media/pci/cx88/cx88-video.c if (cc->sreg) cc 648 drivers/media/pci/cx88/cx88-video.c cx_sandor(cc->sreg, cc->reg, mask, value); cc 650 drivers/media/pci/cx88/cx88-video.c cx_andor(cc->reg, mask, value); cc 658 drivers/media/pci/cx88/cx88-video.c const struct cx88_ctrl *cc = ctrl->priv; cc 679 drivers/media/pci/cx88/cx88-video.c mask = cc->mask; cc 689 drivers/media/pci/cx88/cx88-video.c value = ((ctrl->val - cc->off) << cc->shift) & cc->mask; cc 694 drivers/media/pci/cx88/cx88-video.c ctrl->id, ctrl->name, ctrl->val, cc->reg, value, cc 695 drivers/media/pci/cx88/cx88-video.c mask, cc->sreg ? 
" [shadowed]" : ""); cc 696 drivers/media/pci/cx88/cx88-video.c if (cc->sreg) cc 697 drivers/media/pci/cx88/cx88-video.c cx_sandor(cc->sreg, cc->reg, mask, value); cc 699 drivers/media/pci/cx88/cx88-video.c cx_andor(cc->reg, mask, value); cc 1318 drivers/media/pci/cx88/cx88-video.c const struct cx88_ctrl *cc = &cx8800_aud_ctls[i]; cc 1322 drivers/media/pci/cx88/cx88-video.c cc->id, cc->minimum, cc->maximum, cc 1323 drivers/media/pci/cx88/cx88-video.c cc->step, cc->default_value); cc 1328 drivers/media/pci/cx88/cx88-video.c vc->priv = (void *)cc; cc 1332 drivers/media/pci/cx88/cx88-video.c const struct cx88_ctrl *cc = &cx8800_vid_ctls[i]; cc 1336 drivers/media/pci/cx88/cx88-video.c cc->id, cc->minimum, cc->maximum, cc 1337 drivers/media/pci/cx88/cx88-video.c cc->step, cc->default_value); cc 1342 drivers/media/pci/cx88/cx88-video.c vc->priv = (void *)cc; cc 32 drivers/media/pci/ivtv/ivtv-vbi.c static void ivtv_set_cc(struct ivtv *itv, int mode, const struct vbi_cc *cc) cc 41 drivers/media/pci/ivtv/ivtv-vbi.c data.data[0] = cc->odd[0]; cc 42 drivers/media/pci/ivtv/ivtv-vbi.c data.data[1] = cc->odd[1]; cc 46 drivers/media/pci/ivtv/ivtv-vbi.c data.data[0] = cc->even[0]; cc 47 drivers/media/pci/ivtv/ivtv-vbi.c data.data[1] = cc->even[1]; cc 85 drivers/media/pci/ivtv/ivtv-vbi.c struct vbi_cc *cc, int *found_cc) cc 91 drivers/media/pci/ivtv/ivtv-vbi.c cc->even[0] = d->data[0]; cc 92 drivers/media/pci/ivtv/ivtv-vbi.c cc->even[1] = d->data[1]; cc 94 drivers/media/pci/ivtv/ivtv-vbi.c cc->odd[0] = d->data[0]; cc 95 drivers/media/pci/ivtv/ivtv-vbi.c cc->odd[1] = d->data[1]; cc 121 drivers/media/pci/ivtv/ivtv-vbi.c static void ivtv_write_vbi_cc_lines(struct ivtv *itv, const struct vbi_cc *cc) cc 126 drivers/media/pci/ivtv/ivtv-vbi.c memcpy(&vi->cc_payload[vi->cc_payload_idx], cc, cc 137 drivers/media/pci/ivtv/ivtv-vbi.c struct vbi_cc cc = { .odd = { 0x80, 0x80 }, .even = { 0x80, 0x80 } }; cc 142 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_write_vbi_line(itv, sliced + i, &cc, &found_cc); cc 145 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_write_vbi_cc_lines(itv, &cc); cc 153 drivers/media/pci/ivtv/ivtv-vbi.c struct vbi_cc cc = { .odd = { 0x80, 0x80 }, .even = { 0x80, 0x80 } }; cc 165 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_write_vbi_line(itv, &d, &cc, &found_cc); cc 169 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_write_vbi_cc_lines(itv, &cc); cc 454 drivers/media/pci/ivtv/ivtv-vbi.c struct vbi_cc cc = { .odd = { 0x80, 0x80 }, .even = { 0x80, 0x80 } }; cc 457 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_set_cc(itv, 0, &cc); cc 466 drivers/media/pci/ivtv/ivtv-vbi.c struct vbi_cc cc = { .odd = { 0x80, 0x80 }, .even = { 0x80, 0x80 } }; cc 490 drivers/media/pci/ivtv/ivtv-vbi.c cc.odd[0] = data.data[0]; cc 491 drivers/media/pci/ivtv/ivtv-vbi.c cc.odd[1] = data.data[1]; cc 496 drivers/media/pci/ivtv/ivtv-vbi.c cc.even[0] = data.data[0]; cc 497 drivers/media/pci/ivtv/ivtv-vbi.c cc.even[1] = data.data[1]; cc 501 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_set_cc(itv, mode, &cc); cc 503 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_set_cc(itv, 0, &cc); cc 518 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_set_cc(itv, 3, &cc); cc 521 drivers/media/pci/ivtv/ivtv-vbi.c cc = vi->cc_payload[0]; cc 526 drivers/media/pci/ivtv/ivtv-vbi.c if (vi->cc_payload_idx && cc.odd[0] == 0x80 && cc.odd[1] == 0x80) cc 529 drivers/media/pci/ivtv/ivtv-vbi.c ivtv_set_cc(itv, 3, &cc); cc 111 drivers/media/pci/saa7164/saa7164-core.c u8 cc, a; cc 125 drivers/media/pci/saa7164/saa7164-core.c cc = *(bufcpu + i + 3) & 0x0f; cc 129 drivers/media/pci/saa7164/saa7164-core.c if (a != cc) { 
cc 131 drivers/media/pci/saa7164/saa7164-core.c port->last_v_cc, cc, i); cc 135 drivers/media/pci/saa7164/saa7164-core.c port->last_v_cc = cc; cc 139 drivers/media/pci/saa7164/saa7164-core.c if (a != cc) { cc 141 drivers/media/pci/saa7164/saa7164-core.c port->last_a_cc, cc, i); cc 145 drivers/media/pci/saa7164/saa7164-core.c port->last_a_cc = cc; cc 78 drivers/media/pci/ttpci/dvb_filter.c p2ts->cc=0; cc 96 drivers/media/pci/ttpci/dvb_filter.c buf[3]=0x10|((p2ts->cc++)&0x0f); cc 105 drivers/media/pci/ttpci/dvb_filter.c buf[3]=0x30|((p2ts->cc++)&0x0f); cc 28 drivers/media/pci/ttpci/dvb_filter.h unsigned char cc; cc 253 drivers/media/platform/ti-vpe/cal.c struct cc_data *cc[CAL_NUM_CSI2_PORTS]; cc 271 drivers/media/platform/ti-vpe/cal.c struct cc_data *cc; cc 435 drivers/media/platform/ti-vpe/cal.c struct cc_data *cc; cc 437 drivers/media/platform/ti-vpe/cal.c cc = devm_kzalloc(&pdev->dev, sizeof(*cc), GFP_KERNEL); cc 438 drivers/media/platform/ti-vpe/cal.c if (!cc) cc 441 drivers/media/platform/ti-vpe/cal.c cc->res = platform_get_resource_byname(pdev, cc 446 drivers/media/platform/ti-vpe/cal.c cc->base = devm_ioremap_resource(&pdev->dev, cc->res); cc 447 drivers/media/platform/ti-vpe/cal.c if (IS_ERR(cc->base)) { cc 449 drivers/media/platform/ti-vpe/cal.c return ERR_CAST(cc->base); cc 453 drivers/media/platform/ti-vpe/cal.c cc->res->name, &cc->res->start, &cc->res->end); cc 455 drivers/media/platform/ti-vpe/cal.c return cc; cc 494 drivers/media/platform/ti-vpe/cal.c &dev->ctx[0]->cc->res->start); cc 496 drivers/media/platform/ti-vpe/cal.c (__force const void *)dev->ctx[0]->cc->base, cc 497 drivers/media/platform/ti-vpe/cal.c resource_size(dev->ctx[0]->cc->res), cc 503 drivers/media/platform/ti-vpe/cal.c &dev->ctx[1]->cc->res->start); cc 505 drivers/media/platform/ti-vpe/cal.c (__force const void *)dev->ctx[1]->cc->base, cc 506 drivers/media/platform/ti-vpe/cal.c resource_size(dev->ctx[1]->cc->res), cc 771 drivers/media/platform/ti-vpe/cal.c reg0 = reg_read(ctx->cc, CAL_CSI2_PHY_REG0); cc 778 drivers/media/platform/ti-vpe/cal.c reg_write(ctx->cc, CAL_CSI2_PHY_REG0, reg0); cc 780 drivers/media/platform/ti-vpe/cal.c reg1 = reg_read(ctx->cc, CAL_CSI2_PHY_REG1); cc 787 drivers/media/platform/ti-vpe/cal.c reg_write(ctx->cc, CAL_CSI2_PHY_REG1, reg1); cc 1790 drivers/media/platform/ti-vpe/cal.c ctx->cc = dev->cc[inst]; cc 1851 drivers/media/platform/ti-vpe/cal.c dev->cc[0] = cc_create(dev, 0); cc 1852 drivers/media/platform/ti-vpe/cal.c if (IS_ERR(dev->cc[0])) cc 1853 drivers/media/platform/ti-vpe/cal.c return PTR_ERR(dev->cc[0]); cc 1855 drivers/media/platform/ti-vpe/cal.c dev->cc[1] = cc_create(dev, 1); cc 1856 drivers/media/platform/ti-vpe/cal.c if (IS_ERR(dev->cc[1])) cc 1857 drivers/media/platform/ti-vpe/cal.c return PTR_ERR(dev->cc[1]); cc 69 drivers/media/platform/vivid/vivid-vbi-gen.c static void cc_insert(u8 *cc, u8 ch) cc 75 drivers/media/platform/vivid/vivid-vbi-gen.c cc[2 * i] = cc[2 * i + 1] = (ch & (1 << i)) ? 
1 : 0; cc 76 drivers/media/platform/vivid/vivid-vbi-gen.c tot += cc[2 * i]; cc 78 drivers/media/platform/vivid/vivid-vbi-gen.c cc[14] = cc[15] = !(tot & 1); cc 88 drivers/media/platform/vivid/vivid-vbi-gen.c u8 cc[CC_PREAMBLE_BITS + 2 * 16] = { cc 98 drivers/media/platform/vivid/vivid-vbi-gen.c cc_insert(cc + CC_PREAMBLE_BITS, data->data[0]); cc 99 drivers/media/platform/vivid/vivid-vbi-gen.c cc_insert(cc + CC_PREAMBLE_BITS + 16, data->data[1]); cc 101 drivers/media/platform/vivid/vivid-vbi-gen.c for (i = 0, bit = 0; bit < sizeof(cc); bit++) { cc 105 drivers/media/platform/vivid/vivid-vbi-gen.c buf[i++] = cc[bit] ? 0xc0 : 0x10; cc 260 drivers/media/usb/s2255/s2255drv.c u32 cc; /* current channel */ cc 1705 drivers/media/usb/s2255/s2255drv.c vc = &dev->vc[dev->cc]; cc 1710 drivers/media/usb/s2255/s2255drv.c unsigned int cc; cc 1723 drivers/media/usb/s2255/s2255drv.c cc = le32_to_cpu(pdword[1]); cc 1724 drivers/media/usb/s2255/s2255drv.c if (cc >= MAX_CHANNELS) { cc 1730 drivers/media/usb/s2255/s2255drv.c dev->cc = G_chnmap[cc]; cc 1731 drivers/media/usb/s2255/s2255drv.c vc = &dev->vc[dev->cc]; cc 1747 drivers/media/usb/s2255/s2255drv.c cc = G_chnmap[le32_to_cpu(pdword[1])]; cc 1748 drivers/media/usb/s2255/s2255drv.c if (cc >= MAX_CHANNELS) cc 1750 drivers/media/usb/s2255/s2255drv.c vc = &dev->vc[cc]; cc 1757 drivers/media/usb/s2255/s2255drv.c dprintk(dev, 5, "setmode rdy %d\n", cc); cc 1760 drivers/media/usb/s2255/s2255drv.c dev->chn_ready |= (1 << cc); cc 1774 drivers/media/usb/s2255/s2255drv.c le32_to_cpu(pdword[3]), cc); cc 1791 drivers/media/usb/s2255/s2255drv.c vc = &dev->vc[dev->cc]; cc 1812 drivers/media/usb/s2255/s2255drv.c frm, dev, dev->cc, idx); cc 1831 drivers/media/usb/s2255/s2255drv.c dev->cc, idx); cc 1856 drivers/media/usb/s2255/s2255drv.c if (dev->cc >= MAX_CHANNELS) { cc 1857 drivers/media/usb/s2255/s2255drv.c dev->cc = 0; cc 119 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c int cc; /* MuxCounter - will increment on EVERY MUX PACKET */ cc 558 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c u16 csum = 0, cc; cc 576 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c cc = (muxpack[len - 4] << 8) | muxpack[len - 3]; cc 577 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c cc &= 0x7FFF; cc 578 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c if ((cc != ttusb->cc) && (ttusb->cc != -1)) cc 580 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c __func__, (cc - ttusb->cc) & 0x7FFF); cc 581 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c ttusb->cc = (cc + 1) & 0x7FFF; cc 837 drivers/media/usb/ttusb-budget/dvb-ttusb-budget.c ttusb->cc = -1; cc 84 drivers/media/usb/ttusb-dec/ttusb_dec.c unsigned char cc; cc 206 drivers/media/usb/ttusb-dec/ttusb_dec.c p2ts->cc=0; cc 224 drivers/media/usb/ttusb-dec/ttusb_dec.c buf[3]=0x10|((p2ts->cc++)&0x0f); cc 233 drivers/media/usb/ttusb-dec/ttusb_dec.c buf[3]=0x30|((p2ts->cc++)&0x0f); cc 13 drivers/mtd/nand/raw/bcm47xxnflash/bcm47xxnflash.h struct bcma_drv_cc *cc; cc 36 drivers/mtd/nand/raw/bcm47xxnflash/main.c b47n->cc = container_of(nflash, struct bcma_drv_cc, nflash); cc 38 drivers/mtd/nand/raw/bcm47xxnflash/main.c if (b47n->cc->core->bus->chipinfo.id == BCMA_CHIP_ID_BCM4706) { cc 43 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c static int bcm47xxnflash_ops_bcm4706_ctl_cmd(struct bcma_drv_cc *cc, u32 code) cc 47 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(cc, BCMA_CC_NFLASH_CTL, NCTL_START | code); cc 49 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (!(bcma_cc_read32(cc, BCMA_CC_NFLASH_CTL) & NCTL_START)) { cc 
61 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c static int bcm47xxnflash_ops_bcm4706_poll(struct bcma_drv_cc *cc) cc 66 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcma_cc_read32(cc, BCMA_CC_NFLASH_CTL) & NCTL_READY) { cc 67 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcma_cc_read32(cc, BCMA_CC_NFLASH_CTL) & cc 105 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(b47n->cc, BCMA_CC_NFLASH_COL_ADDR, cc 107 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(b47n->cc, BCMA_CC_NFLASH_ROW_ADDR, cc 114 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(b47n->cc, ctlcode)) cc 116 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_poll(b47n->cc)) cc 124 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(b47n->cc, cc 127 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c *dest = bcma_cc_read32(b47n->cc, BCMA_CC_NFLASH_DATA); cc 140 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c struct bcma_drv_cc *cc = b47n->cc; cc 151 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(cc, BCMA_CC_NFLASH_DATA, *data); cc 156 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(cc, ctlcode)) { cc 185 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcm47xxnflash_ops_bcm4706_ctl_cmd(b47n->cc, code); cc 199 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c return !!(bcma_cc_read32(b47n->cc, BCMA_CC_NFLASH_CTL) & NCTL_READY); cc 215 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c struct bcma_drv_cc *cc = b47n->cc; cc 234 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(b47n->cc, ctlcode)) { cc 248 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(b47n->cc, cc 254 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_read32(b47n->cc, BCMA_CC_NFLASH_DATA) cc 261 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(cc, ctlcode)) cc 271 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(cc, BCMA_CC_NFLASH_ROW_ADDR, cc 275 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(cc, ctlcode)) cc 282 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(cc, BCMA_CC_NFLASH_COL_ADDR, cc 284 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(cc, BCMA_CC_NFLASH_ROW_ADDR, cc 290 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(cc, ctlcode)) cc 294 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(cc, NCTL_CMD0 | cc 297 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_poll(cc)) cc 311 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c struct bcma_drv_cc *cc = b47n->cc; cc 323 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (bcm47xxnflash_ops_bcm4706_ctl_cmd(cc, NCTL_READ)) cc 325 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c return bcma_cc_read32(cc, BCMA_CC_NFLASH_DATA) & 0xff; cc 397 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_set32(b47n->cc, BCMA_CC_4706_FLASHSCFG, cc 401 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c if (b47n->cc->status & BCMA_CC_CHIPST_4706_PKG_OPTION) { cc 405 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c freq = bcma_chipco_pll_read(b47n->cc, 4); cc 416 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_write32(b47n->cc, BCMA_CC_NFLASH_WAITCNT0, cc 443 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c 
bcma_cc_write32(b47n->cc, BCMA_CC_NFLASH_CONF, val); cc 447 drivers/mtd/nand/raw/bcm47xxnflash/ops_bcm4706.c bcma_cc_mask32(b47n->cc, BCMA_CC_4706_FLASHSCFG, cc 177 drivers/net/can/usb/ems_usb.c } cc; cc 355 drivers/net/can/usb/ems_usb.c u8 ecc = msg->msg.error.cc.regs.sja1000.ecc; cc 356 drivers/net/can/usb/ems_usb.c u8 txerr = msg->msg.error.cc.regs.sja1000.txerr; cc 357 drivers/net/can/usb/ems_usb.c u8 rxerr = msg->msg.error.cc.regs.sja1000.rxerr; cc 570 drivers/net/dsa/mv88e6xxx/chip.h u64 (*clock_read)(const struct cyclecounter *cc); cc 46 drivers/net/dsa/mv88e6xxx/ptp.c #define cc_to_chip(cc) container_of(cc, struct mv88e6xxx_chip, tstamp_cc) cc 85 drivers/net/dsa/mv88e6xxx/ptp.c static u64 mv88e6352_ptp_clock_read(const struct cyclecounter *cc) cc 87 drivers/net/dsa/mv88e6xxx/ptp.c struct mv88e6xxx_chip *chip = cc_to_chip(cc); cc 99 drivers/net/dsa/mv88e6xxx/ptp.c static u64 mv88e6165_ptp_clock_read(const struct cyclecounter *cc) cc 101 drivers/net/dsa/mv88e6xxx/ptp.c struct mv88e6xxx_chip *chip = cc_to_chip(cc); cc 422 drivers/net/dsa/mv88e6xxx/ptp.c static u64 mv88e6xxx_ptp_clock_read(const struct cyclecounter *cc) cc 424 drivers/net/dsa/mv88e6xxx/ptp.c struct mv88e6xxx_chip *chip = cc_to_chip(cc); cc 427 drivers/net/dsa/mv88e6xxx/ptp.c return chip->info->ops->ptp_ops->clock_read(cc); cc 323 drivers/net/dsa/sja1105/sja1105_ptp.c static u64 sja1105_ptptsclk_read(const struct cyclecounter *cc) cc 325 drivers/net/dsa/sja1105/sja1105_ptp.c struct sja1105_private *priv = cc_to_sja1105(cc); cc 1225 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c u8 cc; cc 1227 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c for (cc = 0; len; buf++, len--) cc 1228 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c cc += *buf; cc 1230 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c return (cc == cc_in) ? 
true : false; cc 125 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c static u64 xgbe_cc_read(const struct cyclecounter *cc) cc 127 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c struct xgbe_prv_data *pdata = container_of(cc, cc 231 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c struct cyclecounter *cc = &pdata->tstamp_cc; cc 261 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c cc->read = xgbe_cc_read; cc 262 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c cc->mask = CLOCKSOURCE_MASK(64); cc 263 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c cc->mult = 1; cc 264 drivers/net/ethernet/amd/xgbe/xgbe-ptp.c cc->shift = 0; cc 152 drivers/net/ethernet/broadcom/bgmac-bcma-mdio.c struct bcma_drv_cc *cc = &bgmac->bcma.core->bus->drv_cc; cc 154 drivers/net/ethernet/broadcom/bgmac-bcma-mdio.c bcma_chipco_chipctl_maskset(cc, 2, ~0xc0000000, 0); cc 155 drivers/net/ethernet/broadcom/bgmac-bcma-mdio.c bcma_chipco_chipctl_maskset(cc, 4, ~0x80000000, 0); cc 66 drivers/net/ethernet/broadcom/bgmac-bcma.c struct bcma_drv_cc *cc = &bgmac->bcma.core->bus->drv_cc; cc 68 drivers/net/ethernet/broadcom/bgmac-bcma.c bcma_chipco_chipctl_maskset(cc, offset, mask, set); cc 73 drivers/net/ethernet/broadcom/bgmac-bcma.c struct bcma_drv_cc *cc = &bgmac->bcma.core->bus->drv_cc; cc 75 drivers/net/ethernet/broadcom/bgmac-bcma.c return bcma_pmu_get_bus_clock(cc); cc 15302 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c static u64 bnx2x_cyclecounter_read(const struct cyclecounter *cc) cc 15304 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c struct bnx2x *bp = container_of(cc, struct bnx2x, cyclecounter); cc 209 drivers/net/ethernet/cavium/common/cavium_ptp.c static u64 cavium_ptp_cc_read(const struct cyclecounter *cc) cc 212 drivers/net/ethernet/cavium/common/cavium_ptp.c container_of(cc, struct cavium_ptp, cycle_counter); cc 222 drivers/net/ethernet/cavium/common/cavium_ptp.c struct cyclecounter *cc; cc 247 drivers/net/ethernet/cavium/common/cavium_ptp.c cc = &clock->cycle_counter; cc 248 drivers/net/ethernet/cavium/common/cavium_ptp.c cc->read = cavium_ptp_cc_read; cc 249 drivers/net/ethernet/cavium/common/cavium_ptp.c cc->mask = CYCLECOUNTER_MASK(64); cc 250 drivers/net/ethernet/cavium/common/cavium_ptp.c cc->mult = 1; cc 251 drivers/net/ethernet/cavium/common/cavium_ptp.c cc->shift = 0; cc 562 drivers/net/ethernet/freescale/fec.h struct cyclecounter cc; cc 177 drivers/net/ethernet/freescale/fec_ptp.c val &= fep->cc.mask; cc 181 drivers/net/ethernet/freescale/fec_ptp.c fep->next_counter = (val + fep->reload_period) & fep->cc.mask; cc 200 drivers/net/ethernet/freescale/fec_ptp.c fep->next_counter = (fep->next_counter + fep->reload_period) & fep->cc.mask; cc 219 drivers/net/ethernet/freescale/fec_ptp.c static u64 fec_ptp_read(const struct cyclecounter *cc) cc 222 drivers/net/ethernet/freescale/fec_ptp.c container_of(cc, struct fec_enet_private, cc); cc 265 drivers/net/ethernet/freescale/fec_ptp.c memset(&fep->cc, 0, sizeof(fep->cc)); cc 266 drivers/net/ethernet/freescale/fec_ptp.c fep->cc.read = fec_ptp_read; cc 267 drivers/net/ethernet/freescale/fec_ptp.c fep->cc.mask = CLOCKSOURCE_MASK(31); cc 268 drivers/net/ethernet/freescale/fec_ptp.c fep->cc.shift = 31; cc 269 drivers/net/ethernet/freescale/fec_ptp.c fep->cc.mult = FEC_CC_MULT; cc 272 drivers/net/ethernet/freescale/fec_ptp.c timecounter_init(&fep->tc, &fep->cc, ktime_to_ns(ktime_get_real())); cc 423 drivers/net/ethernet/freescale/fec_ptp.c counter = ns & fep->cc.mask; cc 427 drivers/net/ethernet/freescale/fec_ptp.c timecounter_init(&fep->tc, &fep->cc, ns); cc 553 drivers/net/ethernet/freescale/fec_ptp.c 
fep->cc.mask; cc 324 drivers/net/ethernet/intel/e1000e/e1000.h struct cyclecounter cc; cc 3513 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift + INCPERIOD_SHIFT_96MHZ; cc 3521 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift + INCPERIOD_SHIFT_96MHZ; cc 3527 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift; cc 3535 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift; cc 3543 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift; cc 3549 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift; cc 3558 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.shift = shift; cc 3928 drivers/net/ethernet/intel/e1000e/netdev.c timecounter_init(&adapter->tc, &adapter->cc, cc 4408 drivers/net/ethernet/intel/e1000e/netdev.c static u64 e1000e_cyclecounter_read(const struct cyclecounter *cc) cc 4410 drivers/net/ethernet/intel/e1000e/netdev.c struct e1000_adapter *adapter = container_of(cc, struct e1000_adapter, cc 4411 drivers/net/ethernet/intel/e1000e/netdev.c cc); cc 4444 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.read = e1000e_cyclecounter_read; cc 4445 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.mask = CYCLECOUNTER_MASK(64); cc 4446 drivers/net/ethernet/intel/e1000e/netdev.c adapter->cc.mult = 1; cc 215 drivers/net/ethernet/intel/e1000e/ptp.c timecounter_init(&adapter->tc, &adapter->cc, ns); cc 557 drivers/net/ethernet/intel/igb/igb.h struct cyclecounter cc; cc 74 drivers/net/ethernet/intel/igb/igb_ptp.c static u64 igb_ptp_read_82576(const struct cyclecounter *cc) cc 76 drivers/net/ethernet/intel/igb/igb_ptp.c struct igb_adapter *igb = container_of(cc, struct igb_adapter, cc); cc 91 drivers/net/ethernet/intel/igb/igb_ptp.c static u64 igb_ptp_read_82580(const struct cyclecounter *cc) cc 93 drivers/net/ethernet/intel/igb/igb_ptp.c struct igb_adapter *igb = container_of(cc, struct igb_adapter, cc); cc 369 drivers/net/ethernet/intel/igb/igb_ptp.c timecounter_init(&igb->tc, &igb->cc, ns); cc 1194 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.read = igb_ptp_read_82576; cc 1195 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.mask = CYCLECOUNTER_MASK(64); cc 1196 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.mult = 1; cc 1197 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.shift = IGB_82576_TSYNC_SHIFT; cc 1213 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.read = igb_ptp_read_82580; cc 1214 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.mask = CYCLECOUNTER_MASK(IGB_NBITS_82580); cc 1215 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.mult = 1; cc 1216 drivers/net/ethernet/intel/igb/igb_ptp.c adapter->cc.shift = 0; cc 1357 drivers/net/ethernet/intel/igb/igb_ptp.c timecounter_init(&adapter->tc, &adapter->cc, cc 159 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c struct cyclecounter *cc = &adapter->hw_cc; cc 190 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c clock_period = div_u64((NS_PER_HALF_SEC << cc->shift), cc->mult); cc 209 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c clock_edge += div_u64(((u64)rem << cc->shift), cc->mult); cc 239 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c struct cyclecounter *cc = &adapter->hw_cc; cc 275 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c freqout = div_u64(NS_PER_HALF_SEC << cc->shift, cc->mult); cc 292 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c clock_edge += div_u64(((u64)rem << cc->shift), cc->mult); cc 326 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c static u64 ixgbe_ptp_read_X550(const struct cyclecounter 
*cc) cc 329 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c container_of(cc, struct ixgbe_adapter, hw_cc); cc 363 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c static u64 ixgbe_ptp_read_82599(const struct cyclecounter *cc) cc 366 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c container_of(cc, struct ixgbe_adapter, hw_cc); cc 1211 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c struct cyclecounter cc; cc 1228 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.mask = CLOCKSOURCE_MASK(64); cc 1229 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.mult = 1; cc 1230 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.shift = 0; cc 1242 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.mult = 3; cc 1243 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.shift = 2; cc 1248 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.read = ixgbe_ptp_read_X550; cc 1263 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.read = ixgbe_ptp_read_82599; cc 1265 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c ixgbe_ptp_link_speed_adjust(adapter, &cc.shift, &incval); cc 1269 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.read = ixgbe_ptp_read_82599; cc 1271 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c ixgbe_ptp_link_speed_adjust(adapter, &cc.shift, &incval); cc 1273 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c cc.shift -= IXGBE_INCVAL_SHIFT_82599; cc 1288 drivers/net/ethernet/intel/ixgbe/ixgbe_ptp.c memcpy(&adapter->hw_cc, &cc, sizeof(adapter->hw_cc)); cc 389 drivers/net/ethernet/mellanox/mlx5/core/en.h u16 cc; cc 482 drivers/net/ethernet/mellanox/mlx5/core/en.h u32 *cc; cc 512 drivers/net/ethernet/mellanox/mlx5/core/en.h u16 cc; cc 547 drivers/net/ethernet/mellanox/mlx5/core/en.h u16 cc; cc 35 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c if (icosq->cc == icosq->pc) cc 43 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c icosq->sqn, icosq->cc, icosq->pc); cc 50 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c WARN_ONCE(icosq->cc != icosq->pc, "ICOSQ 0x%x: cc (0x%x) != pc (0x%x)\n", cc 51 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c icosq->sqn, icosq->cc, icosq->pc); cc 52 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_rx.c icosq->cc = 0; cc 11 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c if (sq->cc == sq->pc) cc 19 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c sq->sqn, sq->cc, sq->pc); cc 26 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c WARN_ONCE(sq->cc != sq->pc, cc 28 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c sq->sqn, sq->cc, sq->pc); cc 29 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c sq->cc = 0; cc 123 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c sq->channel->ix, sq->sqn, sq->cq.mcq.cqn, sq->cc, sq->pc, cc 188 drivers/net/ethernet/mellanox/mlx5/core/en/reporter_tx.c err = devlink_fmsg_u32_pair_put(fmsg, "cc", sq->cc); cc 31 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h mlx5e_wqc_has_room_for(struct mlx5_wq_cyc *wq, u16 cc, u16 pc, u16 n) cc 33 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h return (mlx5_wq_cyc_ctr2ix(wq, cc - pc) >= n) || (cc == pc); cc 147 drivers/net/ethernet/mellanox/mlx5/core/en/txrx.h mlx5_cq_arm(mcq, MLX5_CQ_DB_REQ_NOT, mcq->uar->map, cq->wq.cc); cc 236 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c if (unlikely(!mlx5e_wqc_has_room_for(&sq->wq, sq->cc, sq->pc, cc 289 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c if (unlikely(!mlx5e_wqc_has_room_for(&sq->wq, sq->cc, sq->pc, 1))) { cc 407 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c sqcc = sq->cc; cc 447 
drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c sq->cc = sqcc; cc 455 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c while (sq->cc != sq->pc) { cc 459 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c ci = mlx5_wq_cyc_ctr2ix(&sq->wq, sq->cc); cc 462 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.c sq->cc += wi->num_wqebbs; cc 214 drivers/net/ethernet/mellanox/mlx5/core/en/xdp.h return fifo->xi[(*fifo->cc)++ & fifo->mask]; cc 21 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/tx.h if (sq->pc != sq->cc) cc 963 drivers/net/ethernet/mellanox/mlx5/core/en_main.c xdpi_fifo->cc = &sq->xdpi_fifo_cc; cc 1375 drivers/net/ethernet/mellanox/mlx5/core/en_main.c if (mlx5e_wqc_has_room_for(wq, sq->cc, sq->pc, 1)) { cc 89 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c u32 cqcc = wq->cc; cc 149 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c u32 cqcc = wq->cc + update_owner_only; cc 163 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c mlx5e_cqes_update_owner(wq, cqcc - wq->cc); cc 164 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c wq->cc = cqcc; cc 176 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c u32 cc = wq->cc; cc 178 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c mlx5e_read_title_slot(rq, wq, cc); cc 179 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c mlx5e_read_mini_arr_slot(wq, cqd, cc + 1); cc 180 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c mlx5e_decompress_cqe(rq, wq, cc); cc 607 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c sqcc = sq->cc; cc 646 drivers/net/ethernet/mellanox/mlx5/core/en_rx.c sq->cc = sqcc; cc 258 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c if (unlikely(!mlx5e_wqc_has_room_for(wq, sq->cc, sq->pc, sq->stop_room))) { cc 409 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c ci = mlx5_cqwq_ctr2ix(wq, wq->cc - 1); cc 447 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c sqcc = sq->cc; cc 523 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c sq->cc = sqcc; cc 528 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c mlx5e_wqc_has_room_for(&sq->wq, sq->cc, sq->pc, sq->stop_room) && cc 545 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c sqcc = sq->cc; cc 573 drivers/net/ethernet/mellanox/mlx5/core/en_tx.c sq->cc = sqcc; cc 733 drivers/net/ethernet/mellanox/mlx5/core/eq.c struct mlx5_eqe *mlx5_eq_get_eqe(struct mlx5_eq *eq, u32 cc) cc 735 drivers/net/ethernet/mellanox/mlx5/core/eq.c u32 ci = eq->cons_index + cc; cc 750 drivers/net/ethernet/mellanox/mlx5/core/eq.c void mlx5_eq_update_ci(struct mlx5_eq *eq, u32 cc, bool arm) cc 755 drivers/net/ethernet/mellanox/mlx5/core/eq.c eq->cons_index += cc; cc 110 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c if (unlikely(conn->qp.rq.pc - conn->qp.rq.cc >= conn->qp.rq.size)) { cc 191 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c if (conn->qp.sq.pc - conn->qp.sq.cc >= conn->qp.sq.size) { cc 259 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c conn->qp.rq.cc++; cc 302 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c conn->qp.sq.cc++; cc 362 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c conn->fdev->conn_res.uar->map, conn->cq.wq.cc); cc 402 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c mlx5_fpga_dbg(conn->fdev, "Re-arming CQ with cc# %u\n", conn->cq.wq.cc); cc 543 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c conn->qp.rq.cc = 0; cc 546 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c conn->qp.sq.cc = 0; cc 72 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.h unsigned int cc; cc 79 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.h unsigned int cc; cc 69 drivers/net/ethernet/mellanox/mlx5/core/lib/clock.c 
static u64 read_internal_timer(const struct cyclecounter *cc) cc 71 drivers/net/ethernet/mellanox/mlx5/core/lib/clock.c struct mlx5_clock *clock = container_of(cc, struct mlx5_clock, cycles); cc 75 drivers/net/ethernet/mellanox/mlx5/core/lib/clock.c return mlx5_read_internal_timer(mdev, NULL) & cc->mask; cc 59 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c dr_cq->qp->sq.cc = dr_cq->qp->sq.wqe_head[idx] + 1; cc 61 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c ++dr_cq->qp->sq.cc; cc 65 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c dr_cq->qp->sq.cc = dr_cq->qp->sq.wqe_head[idx] + 1; cc 127 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c dr_qp->rq.cc = 0; cc 130 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_send.c dr_qp->sq.cc = 0; cc 978 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h unsigned int cc; cc 985 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h unsigned int cc; cc 67 drivers/net/ethernet/mellanox/mlx5/core/wq.h u32 cc; /* consumer counter */ cc 180 drivers/net/ethernet/mellanox/mlx5/core/wq.h return mlx5_cqwq_ctr2ix(wq, wq->cc); cc 200 drivers/net/ethernet/mellanox/mlx5/core/wq.h return mlx5_cqwq_get_ctr_wrap_cnt(wq, wq->cc); cc 205 drivers/net/ethernet/mellanox/mlx5/core/wq.h wq->cc++; cc 210 drivers/net/ethernet/mellanox/mlx5/core/wq.h *wq->db = cpu_to_be32(wq->cc & 0xffffff); cc 95 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c static u64 mlxsw_sp1_ptp_read_frc(const struct cyclecounter *cc) cc 98 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c container_of(cc, struct mlxsw_sp_ptp_clock, cycles); cc 100 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c return __mlxsw_sp1_ptp_read_frc(clock, NULL) & cc->mask; cc 118 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c cycles <<= tc->cc->shift; cc 119 drivers/net/ethernet/mellanox/mlxsw/spectrum_ptp.c cycles = div_u64(cycles, tc->cc->mult); cc 38 drivers/net/ethernet/qlogic/qede/qede_ptp.c struct cyclecounter cc; cc 137 drivers/net/ethernet/qlogic/qede/qede_ptp.c timecounter_init(&ptp->tc, &ptp->cc, ns); cc 206 drivers/net/ethernet/qlogic/qede/qede_ptp.c static u64 qede_ptp_read_cc(const struct cyclecounter *cc) cc 213 drivers/net/ethernet/qlogic/qede/qede_ptp.c ptp = container_of(cc, struct qede_ptp, cc); cc 451 drivers/net/ethernet/qlogic/qede/qede_ptp.c memset(&ptp->cc, 0, sizeof(ptp->cc)); cc 452 drivers/net/ethernet/qlogic/qede/qede_ptp.c ptp->cc.read = qede_ptp_read_cc; cc 453 drivers/net/ethernet/qlogic/qede/qede_ptp.c ptp->cc.mask = CYCLECOUNTER_MASK(64); cc 454 drivers/net/ethernet/qlogic/qede/qede_ptp.c ptp->cc.shift = 0; cc 455 drivers/net/ethernet/qlogic/qede/qede_ptp.c ptp->cc.mult = 1; cc 457 drivers/net/ethernet/qlogic/qede/qede_ptp.c timecounter_init(&ptp->tc, &ptp->cc, cc 869 drivers/net/ethernet/renesas/ravb.h u8 cc; /* Content control MSBs (reserved) */ cc 190 drivers/net/ethernet/ti/cpts.c static u64 cpts_systim_read(const struct cyclecounter *cc) cc 195 drivers/net/ethernet/ti/cpts.c struct cpts *cpts = container_of(cc, struct cpts, cc); cc 237 drivers/net/ethernet/ti/cpts.c cpts->cc.mult = neg_adj ? 
mult - diff : mult + diff; cc 281 drivers/net/ethernet/ti/cpts.c timecounter_init(&cpts->tc, &cpts->cc, ns); cc 462 drivers/net/ethernet/ti/cpts.c timecounter_init(&cpts->tc, &cpts->cc, ktime_to_ns(ktime_get_real())); cc 509 drivers/net/ethernet/ti/cpts.c maxsec = cpts->cc.mask; cc 522 drivers/net/ethernet/ti/cpts.c if (cpts->cc.mult || cpts->cc.shift) cc 525 drivers/net/ethernet/ti/cpts.c clocks_calc_mult_shift(&cpts->cc.mult, &cpts->cc.shift, cc 529 drivers/net/ethernet/ti/cpts.c ns = cyclecounter_cyc2ns(&cpts->cc, freq, cpts->cc.mask, &frac); cc 533 drivers/net/ethernet/ti/cpts.c freq, cpts->cc.mult, cpts->cc.shift, (ns - NSEC_PER_SEC)); cc 618 drivers/net/ethernet/ti/cpts.c cpts->cc.mult = prop; cc 621 drivers/net/ethernet/ti/cpts.c cpts->cc.shift = prop; cc 623 drivers/net/ethernet/ti/cpts.c if ((cpts->cc.mult && !cpts->cc.shift) || cc 624 drivers/net/ethernet/ti/cpts.c (!cpts->cc.mult && cpts->cc.shift)) cc 667 drivers/net/ethernet/ti/cpts.c cpts->cc.read = cpts_systim_read; cc 668 drivers/net/ethernet/ti/cpts.c cpts->cc.mask = CLOCKSOURCE_MASK(32); cc 675 drivers/net/ethernet/ti/cpts.c cpts->cc_mult = cpts->cc.mult; cc 108 drivers/net/ethernet/ti/cpts.h struct cyclecounter cc; cc 546 drivers/net/wireless/ath/ath10k/hw.c u32 cc, u32 rcc, u32 cc_prev, u32 rcc_prev) cc 557 drivers/net/wireless/ath/ath10k/hw.c if (cc < cc_prev || rcc < rcc_prev) { cc 560 drivers/net/wireless/ath/ath10k/hw.c if (cc < cc_prev) { cc 566 drivers/net/wireless/ath/ath10k/hw.c if (cc < cc_prev) cc 577 drivers/net/wireless/ath/ath10k/hw.c cc -= cc_prev - cc_fix; cc 580 drivers/net/wireless/ath/ath10k/hw.c survey->time = CCNT_TO_MSEC(ar, cc); cc 394 drivers/net/wireless/ath/ath10k/hw.h u32 cc, u32 rcc, u32 cc_prev, u32 rcc_prev); cc 652 drivers/net/wireless/ath/ath5k/mac80211-ops.c struct ath_cycle_counters *cc = &common->cc_survey; cc 660 drivers/net/wireless/ath/ath5k/mac80211-ops.c if (cc->cycles > 0) { cc 661 drivers/net/wireless/ath/ath5k/mac80211-ops.c ah->survey.time += cc->cycles / div; cc 662 drivers/net/wireless/ath/ath5k/mac80211-ops.c ah->survey.time_busy += cc->rx_busy / div; cc 663 drivers/net/wireless/ath/ath5k/mac80211-ops.c ah->survey.time_rx += cc->rx_frame / div; cc 664 drivers/net/wireless/ath/ath5k/mac80211-ops.c ah->survey.time_tx += cc->tx_frame / div; cc 666 drivers/net/wireless/ath/ath5k/mac80211-ops.c memset(cc, 0, sizeof(*cc)); cc 1865 drivers/net/wireless/ath/ath6kl/wmi.c struct wmi_connect_cmd *cc; cc 1887 drivers/net/wireless/ath/ath6kl/wmi.c cc = (struct wmi_connect_cmd *) skb->data; cc 1890 drivers/net/wireless/ath/ath6kl/wmi.c memcpy(cc->ssid, ssid, ssid_len); cc 1892 drivers/net/wireless/ath/ath6kl/wmi.c cc->ssid_len = ssid_len; cc 1893 drivers/net/wireless/ath/ath6kl/wmi.c cc->nw_type = nw_type; cc 1894 drivers/net/wireless/ath/ath6kl/wmi.c cc->dot11_auth_mode = dot11_auth_mode; cc 1895 drivers/net/wireless/ath/ath6kl/wmi.c cc->auth_mode = auth_mode; cc 1896 drivers/net/wireless/ath/ath6kl/wmi.c cc->prwise_crypto_type = pairwise_crypto; cc 1897 drivers/net/wireless/ath/ath6kl/wmi.c cc->prwise_crypto_len = pairwise_crypto_len; cc 1898 drivers/net/wireless/ath/ath6kl/wmi.c cc->grp_crypto_type = group_crypto; cc 1899 drivers/net/wireless/ath/ath6kl/wmi.c cc->grp_crypto_len = group_crypto_len; cc 1900 drivers/net/wireless/ath/ath6kl/wmi.c cc->ch = cpu_to_le16(channel); cc 1901 drivers/net/wireless/ath/ath6kl/wmi.c cc->ctrl_flags = cpu_to_le32(ctrl_flags); cc 1902 drivers/net/wireless/ath/ath6kl/wmi.c cc->nw_subtype = nw_subtype; cc 1905 drivers/net/wireless/ath/ath6kl/wmi.c 
memcpy(cc->bssid, bssid, ETH_ALEN); cc 1917 drivers/net/wireless/ath/ath6kl/wmi.c struct wmi_reconnect_cmd *cc; cc 1929 drivers/net/wireless/ath/ath6kl/wmi.c cc = (struct wmi_reconnect_cmd *) skb->data; cc 1930 drivers/net/wireless/ath/ath6kl/wmi.c cc->channel = cpu_to_le16(channel); cc 1933 drivers/net/wireless/ath/ath6kl/wmi.c memcpy(cc->bssid, bssid, ETH_ALEN); cc 512 drivers/net/wireless/ath/ath9k/link.c struct ath_cycle_counters *cc = &common->cc_survey; cc 522 drivers/net/wireless/ath/ath9k/link.c if (cc->cycles > 0) { cc 527 drivers/net/wireless/ath/ath9k/link.c survey->time += cc->cycles / div; cc 528 drivers/net/wireless/ath/ath9k/link.c survey->time_busy += cc->rx_busy / div; cc 529 drivers/net/wireless/ath/ath9k/link.c survey->time_rx += cc->rx_frame / div; cc 530 drivers/net/wireless/ath/ath9k/link.c survey->time_tx += cc->tx_frame / div; cc 533 drivers/net/wireless/ath/ath9k/link.c if (cc->cycles < div) cc 536 drivers/net/wireless/ath/ath9k/link.c if (cc->cycles > 0) cc 537 drivers/net/wireless/ath/ath9k/link.c ret = cc->rx_busy * 100 / cc->cycles; cc 539 drivers/net/wireless/ath/ath9k/link.c memset(cc, 0, sizeof(*cc)); cc 180 drivers/net/wireless/ath/hw.c struct ath_cycle_counters *cc = &common->cc_ani; cc 183 drivers/net/wireless/ath/hw.c listen_time = (cc->cycles - cc->rx_frame - cc->tx_frame) / cc 186 drivers/net/wireless/ath/hw.c memset(cc, 0, sizeof(*cc)); cc 557 drivers/net/wireless/ath/regd.c u16 cc = rd & ~COUNTRY_ERD_FLAG; cc 562 drivers/net/wireless/ath/regd.c if (allCountries[i].countryCode == cc) cc 609 drivers/net/wireless/ath/regd.c u16 cc = rd & ~COUNTRY_ERD_FLAG; cc 611 drivers/net/wireless/ath/regd.c country = ath_regd_find_country(cc); cc 613 drivers/net/wireless/ath/regd.c return cc; cc 1552 drivers/net/wireless/ath/wil6210/cfg80211.c struct wil_tid_crypto_rx_single *cc; cc 1562 drivers/net/wireless/ath/wil6210/cfg80211.c cc = &cs->tid_crypto_rx[tid].key_id[key_index]; cc 1564 drivers/net/wireless/ath/wil6210/cfg80211.c memcpy(cc->pn, params->seq, cc 1567 drivers/net/wireless/ath/wil6210/cfg80211.c memset(cc->pn, 0, IEEE80211_GCMP_PN_LEN); cc 1568 drivers/net/wireless/ath/wil6210/cfg80211.c cc->key_set = true; cc 1572 drivers/net/wireless/ath/wil6210/cfg80211.c cc = &cs->group_crypto_rx.key_id[key_index]; cc 1574 drivers/net/wireless/ath/wil6210/cfg80211.c memcpy(cc->pn, params->seq, IEEE80211_GCMP_PN_LEN); cc 1576 drivers/net/wireless/ath/wil6210/cfg80211.c memset(cc->pn, 0, IEEE80211_GCMP_PN_LEN); cc 1577 drivers/net/wireless/ath/wil6210/cfg80211.c cc->key_set = true; cc 1587 drivers/net/wireless/ath/wil6210/cfg80211.c struct wil_tid_crypto_rx_single *cc; cc 1596 drivers/net/wireless/ath/wil6210/cfg80211.c cc = &cs->tid_crypto_rx[tid].key_id[key_index]; cc 1597 drivers/net/wireless/ath/wil6210/cfg80211.c cc->key_set = false; cc 1601 drivers/net/wireless/ath/wil6210/cfg80211.c cc = &cs->group_crypto_rx.key_id[key_index]; cc 1602 drivers/net/wireless/ath/wil6210/cfg80211.c cc->key_set = false; cc 1601 drivers/net/wireless/ath/wil6210/debugfs.c struct wil_tid_crypto_rx_single *cc = &c->key_id[i]; cc 1603 drivers/net/wireless/ath/wil6210/debugfs.c if (cc->key_set) cc 1615 drivers/net/wireless/ath/wil6210/debugfs.c struct wil_tid_crypto_rx_single *cc = &c->key_id[i]; cc 1617 drivers/net/wireless/ath/wil6210/debugfs.c seq_printf(s, " [%i%s]%6phN", i, cc->key_set ? 
"+" : "-", cc 1618 drivers/net/wireless/ath/wil6210/debugfs.c cc->pn); cc 682 drivers/net/wireless/ath/wil6210/txrx.c struct wil_tid_crypto_rx_single *cc = &c->key_id[key_id]; cc 685 drivers/net/wireless/ath/wil6210/txrx.c if (!cc->key_set) { cc 692 drivers/net/wireless/ath/wil6210/txrx.c if (reverse_memcmp(pn, cc->pn, IEEE80211_GCMP_PN_LEN) <= 0) { cc 695 drivers/net/wireless/ath/wil6210/txrx.c cid, tid, mc, key_id, pn, cc->pn); cc 698 drivers/net/wireless/ath/wil6210/txrx.c memcpy(cc->pn, pn, IEEE80211_GCMP_PN_LEN); cc 548 drivers/net/wireless/ath/wil6210/txrx_edma.c struct wil_tid_crypto_rx_single *cc; cc 563 drivers/net/wireless/ath/wil6210/txrx_edma.c cc = &c->key_id[key_id]; cc 566 drivers/net/wireless/ath/wil6210/txrx_edma.c if (!cc->key_set) { cc 573 drivers/net/wireless/ath/wil6210/txrx_edma.c if (reverse_memcmp(pn, cc->pn, IEEE80211_GCMP_PN_LEN) <= 0) { cc 576 drivers/net/wireless/ath/wil6210/txrx_edma.c cid, tid, mc, key_id, pn, cc->pn); cc 579 drivers/net/wireless/ath/wil6210/txrx_edma.c memcpy(cc->pn, pn, IEEE80211_GCMP_PN_LEN); cc 589 drivers/net/wireless/broadcom/b43/phy_lcn.c struct bcma_drv_cc *cc = &dev->dev->bdev->bus->drv_cc; cc 595 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_pll_write(cc, 0x2, 0x03000c04); cc 596 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_pll_maskset(cc, 0x3, 0x00ffffff, 0x0); cc 597 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_pll_write(cc, 0x4, 0x200005c0); cc 599 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_cc_set32(cc, BCMA_CC_PMU_CTL, 0x400); cc 607 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_pll_write(cc, 0x2, 0x03140c04); cc 608 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_pll_maskset(cc, 0x3, 0x00ffffff, 0x333333); cc 609 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_pll_write(cc, 0x4, 0x202c2820); cc 611 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_cc_set32(cc, BCMA_CC_PMU_CTL, 0x400); cc 700 drivers/net/wireless/broadcom/b43/phy_lcn.c struct bcma_drv_cc *cc = &dev->dev->bdev->bus->drv_cc; cc 728 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_regctl_maskset(cc, 0, 0xf, 0x9); cc 729 drivers/net/wireless/broadcom/b43/phy_lcn.c bcma_chipco_chipctl_maskset(cc, 0, 0, 0x03cddddd); cc 6909 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c struct brcmfmac_pd_cc_entry *cc; cc 6927 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c cc = &country_codes->table[i]; cc 6928 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c if ((cc->iso3166[0] == '\0') && (found_index == -1)) cc 6930 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c if ((cc->iso3166[0] == alpha2[0]) && cc 6931 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c (cc->iso3166[1] == alpha2[1])) { cc 6942 drivers/net/wireless/broadcom/brcm80211/brcmfmac/cfg80211.c memcpy(ccreq->ccode, country_codes->table[found_index].cc, cc 1021 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c struct brcmf_core_priv *cc; cc 1028 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c cc = list_first_entry(&chip->cores, struct brcmf_core_priv, list); cc 1029 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c base = cc->pub.base; cc 1048 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c cc->pub.rev, pub->pmurev, pub->pmucaps); cc 1132 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c struct brcmf_core_priv *cc; cc 1135 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c cc = list_first_entry(&chip->cores, struct brcmf_core_priv, list); cc 1136 
drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c if (WARN_ON(!cc || cc->pub.id != BCMA_CORE_CHIPCOMMON)) cc 1138 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c return &cc->pub; cc 1143 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c struct brcmf_core *cc = brcmf_chip_get_chipcommon(pub); cc 1147 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c if (cc->rev >= 35 && cc 1155 drivers/net/wireless/broadcom/brcm80211/brcmfmac/chip.c return cc; cc 444 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c ai_buscore_setup(struct si_info *sii, struct bcma_device *cc) cc 447 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c if (cc->bus->nr_cores == 0) cc 451 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c sii->pub.ccrev = cc->id.rev; cc 454 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c sii->chipst = bcma_read32(cc, CHIPCREGOFFS(chipstatus)); cc 457 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c sii->pub.cccaps = bcma_read32(cc, CHIPCREGOFFS(capabilities)); cc 461 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c sii->pub.pmucaps = bcma_read32(cc, cc 473 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc; cc 479 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c cc = pbus->drv_cc.core; cc 487 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c if (!ai_buscore_setup(sii, cc)) cc 491 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_write32(cc, CHIPCREGOFFS(gpiopullup), 0); cc 492 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_write32(cc, CHIPCREGOFFS(gpiopulldown), 0); cc 545 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc; cc 550 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c cc = sii->icbus->drv_cc.core; cc 554 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_maskset32(cc, regoff, ~mask, val); cc 557 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c w = bcma_read32(cc, regoff); cc 563 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c static uint ai_slowclk_src(struct si_pub *sih, struct bcma_device *cc) cc 573 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc) cc 578 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c div = bcma_read32(cc, CHIPCREGOFFS(system_clk_ctl)); cc 584 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c ai_clkctl_setdelay(struct si_pub *sih, struct bcma_device *cc) cc 597 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c slowclk = ai_slowclk_src(sih, cc); cc 603 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c ai_slowclk_freq(sih, false, cc); cc 608 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_write32(cc, CHIPCREGOFFS(pll_on_delay), pll_on_delay); cc 609 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_write32(cc, CHIPCREGOFFS(fref_sel_delay), fref_sel_delay); cc 616 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc; cc 621 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c cc = sii->icbus->drv_cc.core; cc 622 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c if (cc == NULL) cc 626 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_maskset32(cc, CHIPCREGOFFS(system_clk_ctl), SYCC_CD_MASK, cc 629 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c ai_clkctl_setdelay(sih, cc); cc 639 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc; cc 
653 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c cc = sii->icbus->drv_cc.core; cc 654 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c if (cc) { cc 655 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c slowminfreq = ai_slowclk_freq(sih, false, cc); cc 656 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c fpdelay = (((bcma_read32(cc, CHIPCREGOFFS(pll_on_delay)) + 2) cc 673 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc; cc 677 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c cc = sii->icbus->drv_cc.core; cc 678 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_core_set_clockmode(cc, mode); cc 686 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c struct bcma_device *cc; cc 688 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c cc = sii->icbus->drv_cc.core; cc 691 drivers/net/wireless/broadcom/brcm80211/brcmsmac/aiutils.c bcma_set32(cc, CHIPCREGOFFS(gpiocontrol), GPIO_CTRL_EPA_EN_MASK); cc 3536 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c struct lcnphy_unsign16_struct cc; cc 3537 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.re = 0; cc 3538 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.im = 0; cc 3542 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.re = a; cc 3543 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.im = b; cc 3549 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.re = (u16) di0; cc 3550 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.im = (u16) dq0; cc 3554 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.re = (u16) ei; cc 3555 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.im = (u16) eq; cc 3559 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.re = (u16) fi; cc 3560 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c cc.im = (u16) fq; cc 3563 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c return cc; cc 989 drivers/nvme/target/core.c static inline bool nvmet_cc_en(u32 cc) cc 991 drivers/nvme/target/core.c return (cc >> NVME_CC_EN_SHIFT) & 0x1; cc 994 drivers/nvme/target/core.c static inline u8 nvmet_cc_css(u32 cc) cc 996 drivers/nvme/target/core.c return (cc >> NVME_CC_CSS_SHIFT) & 0x7; cc 999 drivers/nvme/target/core.c static inline u8 nvmet_cc_mps(u32 cc) cc 1001 drivers/nvme/target/core.c return (cc >> NVME_CC_MPS_SHIFT) & 0xf; cc 1004 drivers/nvme/target/core.c static inline u8 nvmet_cc_ams(u32 cc) cc 1006 drivers/nvme/target/core.c return (cc >> NVME_CC_AMS_SHIFT) & 0x7; cc 1009 drivers/nvme/target/core.c static inline u8 nvmet_cc_shn(u32 cc) cc 1011 drivers/nvme/target/core.c return (cc >> NVME_CC_SHN_SHIFT) & 0x3; cc 1014 drivers/nvme/target/core.c static inline u8 nvmet_cc_iosqes(u32 cc) cc 1016 drivers/nvme/target/core.c return (cc >> NVME_CC_IOSQES_SHIFT) & 0xf; cc 1019 drivers/nvme/target/core.c static inline u8 nvmet_cc_iocqes(u32 cc) cc 1021 drivers/nvme/target/core.c return (cc >> NVME_CC_IOCQES_SHIFT) & 0xf; cc 1028 drivers/nvme/target/core.c if (nvmet_cc_iosqes(ctrl->cc) != NVME_NVM_IOSQES || cc 1029 drivers/nvme/target/core.c nvmet_cc_iocqes(ctrl->cc) != NVME_NVM_IOCQES || cc 1030 drivers/nvme/target/core.c nvmet_cc_mps(ctrl->cc) != 0 || cc 1031 drivers/nvme/target/core.c nvmet_cc_ams(ctrl->cc) != 0 || cc 1032 drivers/nvme/target/core.c nvmet_cc_css(ctrl->cc) != 0) { cc 1054 drivers/nvme/target/core.c ctrl->cc = 0; cc 1062 
drivers/nvme/target/core.c old = ctrl->cc; cc 1063 drivers/nvme/target/core.c ctrl->cc = new; cc 1131 drivers/nvme/target/core.c if (unlikely(!(req->sq->ctrl->cc & NVME_CC_ENABLE))) { cc 56 drivers/nvme/target/fabrics-cmd.c val = ctrl->cc; cc 168 drivers/nvme/target/nvmet.h u32 cc; cc 137 drivers/power/supply/axp288_charger.c int cc; cc 143 drivers/power/supply/axp288_charger.c static inline int axp288_charger_set_cc(struct axp288_chrg_info *info, int cc) cc 148 drivers/power/supply/axp288_charger.c if (cc < CHRG_CCCV_CC_OFFSET) cc 149 drivers/power/supply/axp288_charger.c cc = CHRG_CCCV_CC_OFFSET; cc 150 drivers/power/supply/axp288_charger.c else if (cc > info->max_cc) cc 151 drivers/power/supply/axp288_charger.c cc = info->max_cc; cc 153 drivers/power/supply/axp288_charger.c reg_val = (cc - CHRG_CCCV_CC_OFFSET) / CHRG_CCCV_CC_LSB_RES; cc 154 drivers/power/supply/axp288_charger.c cc = (reg_val * CHRG_CCCV_CC_LSB_RES) + CHRG_CCCV_CC_OFFSET; cc 161 drivers/power/supply/axp288_charger.c info->cc = cc; cc 426 drivers/power/supply/axp288_charger.c val->intval = info->cc * 1000; cc 688 drivers/power/supply/axp288_charger.c int ret, cc, cv; cc 769 drivers/power/supply/axp288_charger.c cc = (val & CHRG_CCCV_CC_MASK) >> CHRG_CCCV_CC_BIT_POS; cc 770 drivers/power/supply/axp288_charger.c cc = (cc * CHRG_CCCV_CC_LSB_RES) + CHRG_CCCV_CC_OFFSET; cc 771 drivers/power/supply/axp288_charger.c info->cc = cc; cc 778 drivers/power/supply/axp288_charger.c info->max_cc = info->cc; cc 102 drivers/power/supply/cpcap-battery.c struct cpcap_coulomb_counter_data cc; cc 399 drivers/power/supply/cpcap-battery.c state.counter_uah = cpcap_battery_read_accumulated(ddata, &state.cc); cc 479 drivers/power/supply/cpcap-battery.c sample = latest->cc.sample - previous->cc.sample; cc 484 drivers/power/supply/cpcap-battery.c accumulator = latest->cc.accumulator - previous->cc.accumulator; cc 487 drivers/power/supply/cpcap-battery.c latest->cc.offset); cc 500 drivers/power/supply/cpcap-battery.c sample = latest->cc.sample - previous->cc.sample; cc 507 drivers/power/supply/cpcap-battery.c accumulator = latest->cc.accumulator - previous->cc.accumulator; cc 509 drivers/power/supply/cpcap-battery.c latest->cc.offset); cc 607 drivers/ps3/ps3av_cmd.c info->pb1.cc = mode->audio_num_of_ch + 1; /* CH2:0x01 --- CH8:0x07 */ cc 6133 drivers/s390/block/dasd_eckd.c psf_cuir->cc = response; cc 489 drivers/s390/block/dasd_eckd.h __u8 cc; cc 93 drivers/s390/block/xpram.c int cc = 2; /* return unused cc 2 if pgin traps */ cc 101 drivers/s390/block/xpram.c : "+d" (cc) : "a" (__pa(page_addr)), "d" (xpage_index) : "cc"); cc 102 drivers/s390/block/xpram.c if (cc == 3) cc 104 drivers/s390/block/xpram.c if (cc == 2) cc 106 drivers/s390/block/xpram.c if (cc == 1) cc 123 drivers/s390/block/xpram.c int cc = 2; /* return unused cc 2 if pgin traps */ cc 131 drivers/s390/block/xpram.c : "+d" (cc) : "a" (__pa(page_addr)), "d" (xpage_index) : "cc"); cc 132 drivers/s390/block/xpram.c if (cc == 3) cc 134 drivers/s390/block/xpram.c if (cc == 2) cc 136 drivers/s390/block/xpram.c if (cc == 1) cc 340 drivers/s390/char/sclp.h int cc = 4; /* Initialize for program check handling */ cc 349 drivers/s390/char/sclp.h : "+&d" (cc) : "d" (command), "a" ((unsigned long)sccb) cc 351 drivers/s390/char/sclp.h if (cc == 4) cc 353 drivers/s390/char/sclp.h if (cc == 3) cc 355 drivers/s390/char/sclp.h if (cc == 2) cc 1139 drivers/s390/char/tape_core.c if (irb->scsw.cmd.cc != 0 && cc 1143 drivers/s390/char/tape_core.c device->cdev_id, irb->scsw.cmd.cc, irb->scsw.cmd.fctl); cc 371 
drivers/s390/char/vmur.c int cc; cc 376 drivers/s390/char/vmur.c cc = diag210(&ur_diag210); cc 377 drivers/s390/char/vmur.c switch (cc) { cc 470 drivers/s390/char/vmur.c int cc; cc 472 drivers/s390/char/vmur.c cc = diag14(record, devno, 0x28); cc 473 drivers/s390/char/vmur.c switch (cc) { cc 495 drivers/s390/char/vmur.c int cc; cc 497 drivers/s390/char/vmur.c cc = diag14((unsigned long) buf, devno, 0x00); cc 498 drivers/s390/char/vmur.c switch (cc) { cc 588 drivers/s390/char/vmur.c int cc; cc 590 drivers/s390/char/vmur.c cc = diag14((unsigned long) buf, spid, 0xfff); cc 591 drivers/s390/char/vmur.c switch (cc) { cc 183 drivers/s390/cio/ccwreq.c if (scsw->cc == 3 || scsw->pno) cc 224 drivers/s390/cio/ccwreq.c if (scsw->cc == 1 && (scsw->stctl & SCSW_STCTL_ALERT_STATUS)) cc 290 drivers/s390/cio/chsc.c u8 cc; /* content code */ cc 303 drivers/s390/cio/chsc.c u8 cc; /* content code */ cc 571 drivers/s390/cio/chsc.c switch (sei_area->cc) { cc 580 drivers/s390/cio/chsc.c sei_area->cc); cc 588 drivers/s390/cio/chsc.c switch (sei_area->cc) { cc 612 drivers/s390/cio/chsc.c sei_area->cc); cc 125 drivers/s390/cio/chsc_sch.c int cc; cc 132 drivers/s390/cio/chsc_sch.c cc = stsch(sch->schid, &schib); cc 133 drivers/s390/cio/chsc_sch.c if (!cc && scsw_stctl(&schib.scsw)) cc 240 drivers/s390/cio/chsc_sch.c int cc; cc 258 drivers/s390/cio/chsc_sch.c cc = chsc(chsc_area); cc 259 drivers/s390/cio/chsc_sch.c snprintf(dbf, sizeof(dbf), "cc:%d", cc); cc 261 drivers/s390/cio/chsc_sch.c switch (cc) { cc 278 drivers/s390/cio/chsc_sch.c sch->schid.ssid, sch->schid.sch_no, cc); cc 716 drivers/s390/cio/cio.c int cc; cc 725 drivers/s390/cio/cio.c cc = ssch(sch->schid, orb); cc 726 drivers/s390/cio/cio.c switch (cc) { cc 747 drivers/s390/cio/cio.c int cc; cc 751 drivers/s390/cio/cio.c cc = xsch(sch->schid); cc 752 drivers/s390/cio/cio.c switch (cc) { cc 43 drivers/s390/cio/device_fsm.c int cc; cc 48 drivers/s390/cio/device_fsm.c cc = stsch(sch->schid, &schib); cc 85 drivers/s390/cio/device_fsm.c printk(KERN_WARNING "cio: store subchannel returned: cc=%d\n", cc); cc 470 drivers/s390/cio/device_fsm.c scsw->cc = 1; cc 477 drivers/s390/cio/device_fsm.c scsw->cc = 1; cc 239 drivers/s390/cio/device_status.c cdev_irb->scsw.cmd.cc |= irb->scsw.cmd.cc; cc 62 drivers/s390/cio/eadm_sch.c int cc; cc 72 drivers/s390/cio/eadm_sch.c cc = ssch(sch->schid, orb); cc 73 drivers/s390/cio/eadm_sch.c switch (cc) { cc 88 drivers/s390/cio/eadm_sch.c int cc; cc 90 drivers/s390/cio/eadm_sch.c cc = csch(sch->schid); cc 91 drivers/s390/cio/eadm_sch.c if (cc) cc 169 drivers/s390/cio/ioasm.c int cc = -EIO; cc 177 drivers/s390/cio/ioasm.c : "+d" (cc), "=m" (*(addr_type *) chsc_area) cc 180 drivers/s390/cio/ioasm.c trace_s390_cio_chsc(chsc_area, cc); cc 182 drivers/s390/cio/ioasm.c return cc; cc 41 drivers/s390/cio/qdio_main.c int cc; cc 47 drivers/s390/cio/qdio_main.c : "=d" (cc) cc 49 drivers/s390/cio/qdio_main.c return cc; cc 58 drivers/s390/cio/qdio_main.c int cc; cc 64 drivers/s390/cio/qdio_main.c : "=d" (cc) cc 66 drivers/s390/cio/qdio_main.c return cc; cc 88 drivers/s390/cio/qdio_main.c int cc; cc 94 drivers/s390/cio/qdio_main.c : "=d" (cc), "+d" (__fc), "+d" (__aob) cc 98 drivers/s390/cio/qdio_main.c return cc; cc 289 drivers/s390/cio/qdio_main.c int cc; cc 299 drivers/s390/cio/qdio_main.c cc = do_siga_sync(schid, output, input, fc); cc 300 drivers/s390/cio/qdio_main.c if (unlikely(cc)) cc 301 drivers/s390/cio/qdio_main.c DBF_ERROR("%4x SIGA-S:%2d", SCH_NO(q), cc); cc 302 drivers/s390/cio/qdio_main.c return (cc) ? 
-EIO : 0; cc 319 drivers/s390/cio/qdio_main.c int retries = 0, cc; cc 332 drivers/s390/cio/qdio_main.c cc = do_siga_output(schid, q->mask, busy_bit, fc, laob); cc 350 drivers/s390/cio/qdio_main.c return cc; cc 357 drivers/s390/cio/qdio_main.c int cc; cc 367 drivers/s390/cio/qdio_main.c cc = do_siga_input(schid, q->mask, fc); cc 368 drivers/s390/cio/qdio_main.c if (unlikely(cc)) cc 369 drivers/s390/cio/qdio_main.c DBF_ERROR("%4x SIGA-R:%2d", SCH_NO(q), cc); cc 370 drivers/s390/cio/qdio_main.c return (cc) ? -EIO : 0; cc 786 drivers/s390/cio/qdio_main.c int retries = 0, cc; cc 796 drivers/s390/cio/qdio_main.c cc = qdio_siga_output(q, &busy_bit, aob); cc 797 drivers/s390/cio/qdio_main.c switch (cc) { cc 807 drivers/s390/cio/qdio_main.c cc = -EBUSY; cc 810 drivers/s390/cio/qdio_main.c cc = -ENOBUFS; cc 815 drivers/s390/cio/qdio_main.c DBF_ERROR("%4x SIGA-W:%1d", SCH_NO(q), cc); cc 816 drivers/s390/cio/qdio_main.c cc = -EIO; cc 823 drivers/s390/cio/qdio_main.c return cc; cc 25 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, struct schib *schib, int cc), cc 26 drivers/s390/cio/trace.h TP_ARGS(schid, schib, cc), cc 44 drivers/s390/cio/trace.h __field(int, cc) cc 63 drivers/s390/cio/trace.h __entry->cc = cc; cc 68 drivers/s390/cio/trace.h __entry->cssid, __entry->ssid, __entry->schno, __entry->cc, cc 85 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, struct schib *schib, int cc), cc 86 drivers/s390/cio/trace.h TP_ARGS(schid, schib, cc) cc 96 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, struct schib *schib, int cc), cc 97 drivers/s390/cio/trace.h TP_ARGS(schid, schib, cc) cc 107 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, struct irb *irb, int cc), cc 108 drivers/s390/cio/trace.h TP_ARGS(schid, irb, cc), cc 121 drivers/s390/cio/trace.h __field(int, cc) cc 135 drivers/s390/cio/trace.h __entry->cc = cc; cc 139 drivers/s390/cio/trace.h __entry->cssid, __entry->ssid, __entry->schno, __entry->cc, cc 153 drivers/s390/cio/trace.h TP_PROTO(struct tpi_info *addr, int cc), cc 154 drivers/s390/cio/trace.h TP_ARGS(addr, cc), cc 156 drivers/s390/cio/trace.h __field(int, cc) cc 166 drivers/s390/cio/trace.h __entry->cc = cc; cc 167 drivers/s390/cio/trace.h if (cc != 0) cc 183 drivers/s390/cio/trace.h __entry->cssid, __entry->ssid, __entry->schno, __entry->cc, cc 196 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, union orb *orb, int cc), cc 197 drivers/s390/cio/trace.h TP_ARGS(schid, orb, cc), cc 203 drivers/s390/cio/trace.h __field(int, cc) cc 210 drivers/s390/cio/trace.h __entry->cc = cc; cc 213 drivers/s390/cio/trace.h __entry->schno, __entry->cc cc 218 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, int cc), cc 219 drivers/s390/cio/trace.h TP_ARGS(schid, cc), cc 224 drivers/s390/cio/trace.h __field(int, cc) cc 230 drivers/s390/cio/trace.h __entry->cc = cc; cc 233 drivers/s390/cio/trace.h __entry->schno, __entry->cc cc 243 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, int cc), cc 244 drivers/s390/cio/trace.h TP_ARGS(schid, cc) cc 253 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, int cc), cc 254 drivers/s390/cio/trace.h TP_ARGS(schid, cc) cc 263 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, int cc), cc 264 drivers/s390/cio/trace.h TP_ARGS(schid, cc) cc 273 drivers/s390/cio/trace.h TP_PROTO(struct subchannel_id schid, int cc), cc 274 drivers/s390/cio/trace.h TP_ARGS(schid, cc) cc 286 drivers/s390/cio/trace.h TP_PROTO(struct chsc_header *chsc, int cc), cc 287 
drivers/s390/cio/trace.h TP_ARGS(chsc, cc), cc 289 drivers/s390/cio/trace.h __field(int, cc) cc 296 drivers/s390/cio/trace.h __entry->cc = cc; cc 306 drivers/s390/cio/trace.h __entry->cc, __entry->rcode) cc 362 drivers/s390/cio/trace.h TP_PROTO(struct crw *crw, int cc), cc 363 drivers/s390/cio/trace.h TP_ARGS(crw, cc), cc 366 drivers/s390/cio/trace.h __field(int, cc) cc 377 drivers/s390/cio/trace.h __entry->cc = cc; cc 388 drivers/s390/cio/trace.h __entry->cc, __entry->slct, __entry->oflw, cc 1429 drivers/s390/net/lcs.c if (irb->scsw.cmd.cc != 0) { cc 382 drivers/s390/scsi/zfcp_qdio.c int cc; cc 416 drivers/s390/scsi/zfcp_qdio.c for (cc = 0; cc < QDIO_MAX_BUFFERS_PER_Q; cc++) { cc 417 drivers/s390/scsi/zfcp_qdio.c sbale = &(qdio->res_q[cc]->element[0]); cc 426 drivers/s390/scsi/zfcp_sysfs.c unsigned long long fsum, fmin, fmax, csum, cmin, cmax, cc; \ cc 435 drivers/s390/scsi/zfcp_sysfs.c cc = lat->_name.counter; \ cc 446 drivers/s390/scsi/zfcp_sysfs.c fmin, fmax, fsum, cmin, cmax, csum, cc); \ cc 1265 drivers/scsi/myrs.c mbox->cc.ldev.ldev_num = ldev_num; cc 1266 drivers/scsi/myrs.c mbox->cc.ioctl_opcode = MYRS_IOCTL_CC_START; cc 1267 drivers/scsi/myrs.c mbox->cc.restore_consistency = true; cc 1268 drivers/scsi/myrs.c mbox->cc.initialized_area_only = false; cc 1270 drivers/scsi/myrs.c mbox->cc.ldev.ldev_num = ldev_num; cc 1271 drivers/scsi/myrs.c mbox->cc.ioctl_opcode = MYRS_IOCTL_CC_STOP; cc 821 drivers/scsi/myrs.h } cc; cc 32 drivers/ssb/driver_chipcommon.c static inline u32 chipco_write32_masked(struct ssb_chipcommon *cc, u16 offset, cc 36 drivers/ssb/driver_chipcommon.c value |= chipco_read32(cc, offset) & ~mask; cc 37 drivers/ssb/driver_chipcommon.c chipco_write32(cc, offset, value); cc 42 drivers/ssb/driver_chipcommon.c void ssb_chipco_set_clockmode(struct ssb_chipcommon *cc, cc 45 drivers/ssb/driver_chipcommon.c struct ssb_device *ccdev = cc->dev; cc 57 drivers/ssb/driver_chipcommon.c if (cc->capabilities & SSB_CHIPCO_CAP_PMU) cc 69 drivers/ssb/driver_chipcommon.c if (!(cc->capabilities & SSB_CHIPCO_CAP_PCTL)) cc 74 drivers/ssb/driver_chipcommon.c tmp = chipco_read32(cc, SSB_CHIPCO_SLOWCLKCTL); cc 76 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_SLOWCLKCTL, tmp); cc 81 drivers/ssb/driver_chipcommon.c tmp = chipco_read32(cc, SSB_CHIPCO_SLOWCLKCTL); cc 84 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_SLOWCLKCTL, tmp); cc 86 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_SYSCLKCTL, cc 87 drivers/ssb/driver_chipcommon.c (chipco_read32(cc, SSB_CHIPCO_SYSCLKCTL) | cc 94 drivers/ssb/driver_chipcommon.c tmp = chipco_read32(cc, SSB_CHIPCO_SLOWCLKCTL); cc 101 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_SLOWCLKCTL, tmp); cc 108 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_SYSCLKCTL, cc 109 drivers/ssb/driver_chipcommon.c (chipco_read32(cc, SSB_CHIPCO_SYSCLKCTL) & cc 119 drivers/ssb/driver_chipcommon.c static enum ssb_clksrc chipco_pctl_get_slowclksrc(struct ssb_chipcommon *cc) cc 121 drivers/ssb/driver_chipcommon.c struct ssb_bus *bus = cc->dev->bus; cc 124 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision < 6) { cc 135 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision < 10) { cc 136 drivers/ssb/driver_chipcommon.c tmp = chipco_read32(cc, SSB_CHIPCO_SLOWCLKCTL); cc 150 drivers/ssb/driver_chipcommon.c static int chipco_pctl_clockfreqlimit(struct ssb_chipcommon *cc, int get_max) cc 157 drivers/ssb/driver_chipcommon.c clocksrc = chipco_pctl_get_slowclksrc(cc); cc 158 drivers/ssb/driver_chipcommon.c if 
(cc->dev->id.revision < 6) { cc 169 drivers/ssb/driver_chipcommon.c } else if (cc->dev->id.revision < 10) { cc 175 drivers/ssb/driver_chipcommon.c tmp = chipco_read32(cc, SSB_CHIPCO_SLOWCLKCTL); cc 181 drivers/ssb/driver_chipcommon.c tmp = chipco_read32(cc, SSB_CHIPCO_SYSCLKCTL); cc 211 drivers/ssb/driver_chipcommon.c static void chipco_powercontrol_init(struct ssb_chipcommon *cc) cc 213 drivers/ssb/driver_chipcommon.c struct ssb_bus *bus = cc->dev->bus; cc 217 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_CHIPCTL, 0x3A4); cc 219 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_CHIPCTL, 0xA4); cc 222 drivers/ssb/driver_chipcommon.c if (!(cc->capabilities & SSB_CHIPCO_CAP_PCTL)) cc 225 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision >= 10) { cc 227 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_SYSCLKCTL, cc 228 drivers/ssb/driver_chipcommon.c (chipco_read32(cc, SSB_CHIPCO_SYSCLKCTL) & cc 233 drivers/ssb/driver_chipcommon.c maxfreq = chipco_pctl_clockfreqlimit(cc, 1); cc 234 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_PLLONDELAY, cc 236 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_FREFSELDELAY, cc 242 drivers/ssb/driver_chipcommon.c static u16 pmu_fast_powerup_delay(struct ssb_chipcommon *cc) cc 244 drivers/ssb/driver_chipcommon.c struct ssb_bus *bus = cc->dev->bus; cc 259 drivers/ssb/driver_chipcommon.c static void calc_fast_powerup_delay(struct ssb_chipcommon *cc) cc 261 drivers/ssb/driver_chipcommon.c struct ssb_bus *bus = cc->dev->bus; cc 269 drivers/ssb/driver_chipcommon.c if (cc->capabilities & SSB_CHIPCO_CAP_PMU) { cc 270 drivers/ssb/driver_chipcommon.c cc->fast_pwrup_delay = pmu_fast_powerup_delay(cc); cc 274 drivers/ssb/driver_chipcommon.c if (!(cc->capabilities & SSB_CHIPCO_CAP_PCTL)) cc 277 drivers/ssb/driver_chipcommon.c minfreq = chipco_pctl_clockfreqlimit(cc, 0); cc 278 drivers/ssb/driver_chipcommon.c pll_on_delay = chipco_read32(cc, SSB_CHIPCO_PLLONDELAY); cc 282 drivers/ssb/driver_chipcommon.c cc->fast_pwrup_delay = tmp; cc 285 drivers/ssb/driver_chipcommon.c static u32 ssb_chipco_alp_clock(struct ssb_chipcommon *cc) cc 287 drivers/ssb/driver_chipcommon.c if (cc->capabilities & SSB_CHIPCO_CAP_PMU) cc 288 drivers/ssb/driver_chipcommon.c return ssb_pmu_get_alp_clock(cc); cc 293 drivers/ssb/driver_chipcommon.c static u32 ssb_chipco_watchdog_get_max_timer(struct ssb_chipcommon *cc) cc 297 drivers/ssb/driver_chipcommon.c if (cc->capabilities & SSB_CHIPCO_CAP_PMU) { cc 298 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision < 26) cc 301 drivers/ssb/driver_chipcommon.c nb = (cc->dev->id.revision >= 37) ? 
32 : 24; cc 313 drivers/ssb/driver_chipcommon.c struct ssb_chipcommon *cc = bcm47xx_wdt_get_drvdata(wdt); cc 315 drivers/ssb/driver_chipcommon.c if (cc->dev->bus->bustype != SSB_BUSTYPE_SSB) cc 318 drivers/ssb/driver_chipcommon.c return ssb_chipco_watchdog_timer_set(cc, ticks); cc 323 drivers/ssb/driver_chipcommon.c struct ssb_chipcommon *cc = bcm47xx_wdt_get_drvdata(wdt); cc 326 drivers/ssb/driver_chipcommon.c if (cc->dev->bus->bustype != SSB_BUSTYPE_SSB) cc 329 drivers/ssb/driver_chipcommon.c ticks = ssb_chipco_watchdog_timer_set(cc, cc->ticks_per_ms * ms); cc 330 drivers/ssb/driver_chipcommon.c return ticks / cc->ticks_per_ms; cc 333 drivers/ssb/driver_chipcommon.c static int ssb_chipco_watchdog_ticks_per_ms(struct ssb_chipcommon *cc) cc 335 drivers/ssb/driver_chipcommon.c struct ssb_bus *bus = cc->dev->bus; cc 337 drivers/ssb/driver_chipcommon.c if (cc->capabilities & SSB_CHIPCO_CAP_PMU) { cc 341 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision < 18) cc 344 drivers/ssb/driver_chipcommon.c return ssb_chipco_alp_clock(cc) / 1000; cc 348 drivers/ssb/driver_chipcommon.c void ssb_chipcommon_init(struct ssb_chipcommon *cc) cc 350 drivers/ssb/driver_chipcommon.c if (!cc->dev) cc 353 drivers/ssb/driver_chipcommon.c spin_lock_init(&cc->gpio_lock); cc 355 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision >= 11) cc 356 drivers/ssb/driver_chipcommon.c cc->status = chipco_read32(cc, SSB_CHIPCO_CHIPSTAT); cc 357 drivers/ssb/driver_chipcommon.c dev_dbg(cc->dev->dev, "chipcommon status is 0x%x\n", cc->status); cc 359 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision >= 20) { cc 360 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_GPIOPULLUP, 0); cc 361 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_GPIOPULLDOWN, 0); cc 364 drivers/ssb/driver_chipcommon.c ssb_pmu_init(cc); cc 365 drivers/ssb/driver_chipcommon.c chipco_powercontrol_init(cc); cc 366 drivers/ssb/driver_chipcommon.c ssb_chipco_set_clockmode(cc, SSB_CLKMODE_FAST); cc 367 drivers/ssb/driver_chipcommon.c calc_fast_powerup_delay(cc); cc 369 drivers/ssb/driver_chipcommon.c if (cc->dev->bus->bustype == SSB_BUSTYPE_SSB) { cc 370 drivers/ssb/driver_chipcommon.c cc->ticks_per_ms = ssb_chipco_watchdog_ticks_per_ms(cc); cc 371 drivers/ssb/driver_chipcommon.c cc->max_timer_ms = ssb_chipco_watchdog_get_max_timer(cc) / cc->ticks_per_ms; cc 375 drivers/ssb/driver_chipcommon.c void ssb_chipco_suspend(struct ssb_chipcommon *cc) cc 377 drivers/ssb/driver_chipcommon.c if (!cc->dev) cc 379 drivers/ssb/driver_chipcommon.c ssb_chipco_set_clockmode(cc, SSB_CLKMODE_SLOW); cc 382 drivers/ssb/driver_chipcommon.c void ssb_chipco_resume(struct ssb_chipcommon *cc) cc 384 drivers/ssb/driver_chipcommon.c if (!cc->dev) cc 386 drivers/ssb/driver_chipcommon.c chipco_powercontrol_init(cc); cc 387 drivers/ssb/driver_chipcommon.c ssb_chipco_set_clockmode(cc, SSB_CLKMODE_FAST); cc 391 drivers/ssb/driver_chipcommon.c void ssb_chipco_get_clockcpu(struct ssb_chipcommon *cc, cc 394 drivers/ssb/driver_chipcommon.c *n = chipco_read32(cc, SSB_CHIPCO_CLOCK_N); cc 395 drivers/ssb/driver_chipcommon.c *plltype = (cc->capabilities & SSB_CHIPCO_CAP_PLLT); cc 401 drivers/ssb/driver_chipcommon.c *m = chipco_read32(cc, SSB_CHIPCO_CLOCK_MIPS); cc 405 drivers/ssb/driver_chipcommon.c *m = chipco_read32(cc, SSB_CHIPCO_CLOCK_M2); cc 408 drivers/ssb/driver_chipcommon.c *m = chipco_read32(cc, SSB_CHIPCO_CLOCK_SB); cc 414 drivers/ssb/driver_chipcommon.c void ssb_chipco_get_clockcontrol(struct ssb_chipcommon *cc, cc 417 drivers/ssb/driver_chipcommon.c 
*n = chipco_read32(cc, SSB_CHIPCO_CLOCK_N); cc 418 drivers/ssb/driver_chipcommon.c *plltype = (cc->capabilities & SSB_CHIPCO_CAP_PLLT); cc 421 drivers/ssb/driver_chipcommon.c *m = chipco_read32(cc, SSB_CHIPCO_CLOCK_MIPS); cc 424 drivers/ssb/driver_chipcommon.c if (cc->dev->bus->chip_id != 0x5365) { cc 425 drivers/ssb/driver_chipcommon.c *m = chipco_read32(cc, SSB_CHIPCO_CLOCK_M2); cc 430 drivers/ssb/driver_chipcommon.c *m = chipco_read32(cc, SSB_CHIPCO_CLOCK_SB); cc 434 drivers/ssb/driver_chipcommon.c void ssb_chipco_timing_init(struct ssb_chipcommon *cc, cc 437 drivers/ssb/driver_chipcommon.c struct ssb_device *dev = cc->dev; cc 442 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_PROG_CFG, 0x11); cc 446 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_PROG_WAITCNT, tmp); /* 0x01020a0c for a 100Mhz clock */ cc 454 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_FLASH_WAITCNT, tmp); cc 458 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_PCMCIA_MEMWAIT, tmp); cc 466 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_PROG_WAITCNT, tmp); /* 0x01020a0c for a 100Mhz clock */ cc 471 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_watchdog_timer_set(struct ssb_chipcommon *cc, u32 ticks) cc 476 drivers/ssb/driver_chipcommon.c maxt = ssb_chipco_watchdog_get_max_timer(cc); cc 477 drivers/ssb/driver_chipcommon.c if (cc->capabilities & SSB_CHIPCO_CAP_PMU) { cc 482 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_PMU_WATCHDOG, ticks); cc 485 drivers/ssb/driver_chipcommon.c ssb_chipco_set_clockmode(cc, clkmode); cc 489 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_WATCHDOG, ticks); cc 494 drivers/ssb/driver_chipcommon.c void ssb_chipco_irq_mask(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 496 drivers/ssb/driver_chipcommon.c chipco_write32_masked(cc, SSB_CHIPCO_IRQMASK, mask, value); cc 499 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_irq_status(struct ssb_chipcommon *cc, u32 mask) cc 501 drivers/ssb/driver_chipcommon.c return chipco_read32(cc, SSB_CHIPCO_IRQSTAT) & mask; cc 504 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_in(struct ssb_chipcommon *cc, u32 mask) cc 506 drivers/ssb/driver_chipcommon.c return chipco_read32(cc, SSB_CHIPCO_GPIOIN) & mask; cc 509 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_out(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 514 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 515 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOOUT, mask, value); cc 516 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 521 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_outen(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 526 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 527 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOOUTEN, mask, value); cc 528 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 533 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_control(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 538 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 539 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOCTL, mask, value); cc 540 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 546 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_intmask(struct ssb_chipcommon *cc, u32 mask, u32 value) 
cc 551 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 552 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOIRQ, mask, value); cc 553 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 558 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_polarity(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 563 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 564 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOPOL, mask, value); cc 565 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 570 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_pullup(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 575 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision < 20) cc 578 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 579 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOPULLUP, mask, value); cc 580 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 585 drivers/ssb/driver_chipcommon.c u32 ssb_chipco_gpio_pulldown(struct ssb_chipcommon *cc, u32 mask, u32 value) cc 590 drivers/ssb/driver_chipcommon.c if (cc->dev->id.revision < 20) cc 593 drivers/ssb/driver_chipcommon.c spin_lock_irqsave(&cc->gpio_lock, flags); cc 594 drivers/ssb/driver_chipcommon.c res = chipco_write32_masked(cc, SSB_CHIPCO_GPIOPULLDOWN, mask, value); cc 595 drivers/ssb/driver_chipcommon.c spin_unlock_irqrestore(&cc->gpio_lock, flags); cc 601 drivers/ssb/driver_chipcommon.c int ssb_chipco_serial_init(struct ssb_chipcommon *cc, cc 604 drivers/ssb/driver_chipcommon.c struct ssb_bus *bus = cc->dev->bus; cc 610 drivers/ssb/driver_chipcommon.c unsigned int ccrev = cc->dev->id.revision; cc 612 drivers/ssb/driver_chipcommon.c plltype = (cc->capabilities & SSB_CHIPCO_CAP_PLLT); cc 613 drivers/ssb/driver_chipcommon.c irq = ssb_mips_irq(cc->dev); cc 618 drivers/ssb/driver_chipcommon.c chipco_read32(cc, SSB_CHIPCO_CLOCK_N), cc 619 drivers/ssb/driver_chipcommon.c chipco_read32(cc, SSB_CHIPCO_CLOCK_M2)); cc 627 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_CORECTL, cc 628 drivers/ssb/driver_chipcommon.c chipco_read32(cc, SSB_CHIPCO_CORECTL) cc 631 drivers/ssb/driver_chipcommon.c baud_base = ssb_chipco_alp_clock(cc); cc 635 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_CORECTL, cc 636 drivers/ssb/driver_chipcommon.c chipco_read32(cc, SSB_CHIPCO_CORECTL) cc 640 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_CORECTL, cc 641 drivers/ssb/driver_chipcommon.c chipco_read32(cc, SSB_CHIPCO_CORECTL) cc 645 drivers/ssb/driver_chipcommon.c chipco_write32(cc, SSB_CHIPCO_CORECTL, cc 646 drivers/ssb/driver_chipcommon.c chipco_read32(cc, SSB_CHIPCO_CORECTL) cc 652 drivers/ssb/driver_chipcommon.c div = chipco_read32(cc, SSB_CHIPCO_CLKDIV) cc 662 drivers/ssb/driver_chipcommon.c !(chipco_read32(cc, SSB_CHIPCO_CORECTL) & SSB_CHIPCO_CORECTL_UARTCLK0)) { cc 663 drivers/ssb/driver_chipcommon.c if ((cc->capabilities & SSB_CHIPCO_CAP_UARTCLK) == cc 675 drivers/ssb/driver_chipcommon.c n = (cc->capabilities & SSB_CHIPCO_CAP_NRUART); cc 680 drivers/ssb/driver_chipcommon.c cc_mmio = cc->dev->bus->mmio + (cc->dev->core_index * SSB_CORE_SIZE); cc 22 drivers/ssb/driver_chipcommon_pmu.c static u32 ssb_chipco_pll_read(struct ssb_chipcommon *cc, u32 offset) cc 24 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PLLCTL_ADDR, offset); cc 25 
drivers/ssb/driver_chipcommon_pmu.c return chipco_read32(cc, SSB_CHIPCO_PLLCTL_DATA); cc 28 drivers/ssb/driver_chipcommon_pmu.c static void ssb_chipco_pll_write(struct ssb_chipcommon *cc, cc 31 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PLLCTL_ADDR, offset); cc 32 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PLLCTL_DATA, value); cc 35 drivers/ssb/driver_chipcommon_pmu.c static void ssb_chipco_regctl_maskset(struct ssb_chipcommon *cc, cc 40 drivers/ssb/driver_chipcommon_pmu.c chipco_read32(cc, SSB_CHIPCO_REGCTL_ADDR); cc 41 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_REGCTL_ADDR, offset); cc 42 drivers/ssb/driver_chipcommon_pmu.c chipco_read32(cc, SSB_CHIPCO_REGCTL_ADDR); cc 43 drivers/ssb/driver_chipcommon_pmu.c value = chipco_read32(cc, SSB_CHIPCO_REGCTL_DATA); cc 46 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_REGCTL_DATA, value); cc 47 drivers/ssb/driver_chipcommon_pmu.c chipco_read32(cc, SSB_CHIPCO_REGCTL_DATA); cc 90 drivers/ssb/driver_chipcommon_pmu.c static void ssb_pmu0_pllinit_r0(struct ssb_chipcommon *cc, cc 93 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 104 drivers/ssb/driver_chipcommon_pmu.c cc->pmu.crystalfreq = e->freq; cc 107 drivers/ssb/driver_chipcommon_pmu.c pmuctl = chipco_read32(cc, SSB_CHIPCO_PMU_CTL); cc 113 drivers/ssb/driver_chipcommon_pmu.c dev_info(cc->dev->dev, "Programming PLL to %u.%03u MHz\n", cc 119 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MINRES_MSK, cc 121 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MAXRES_MSK, cc 125 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MINRES_MSK, cc 127 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MAXRES_MSK, cc 134 drivers/ssb/driver_chipcommon_pmu.c tmp = chipco_read32(cc, SSB_CHIPCO_CLKCTLST); cc 139 drivers/ssb/driver_chipcommon_pmu.c tmp = chipco_read32(cc, SSB_CHIPCO_CLKCTLST); cc 141 drivers/ssb/driver_chipcommon_pmu.c dev_emerg(cc->dev->dev, "Failed to turn the PLL off!\n"); cc 144 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU0_PLLCTL0); cc 149 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU0_PLLCTL0, pllctl); cc 152 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU0_PLLCTL1); cc 159 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU0_PLLCTL1, pllctl); cc 162 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU0_PLLCTL2); cc 165 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU0_PLLCTL2, pllctl); cc 168 drivers/ssb/driver_chipcommon_pmu.c pmuctl = chipco_read32(cc, SSB_CHIPCO_PMU_CTL); cc 174 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_CTL, pmuctl); cc 221 drivers/ssb/driver_chipcommon_pmu.c static void ssb_pmu1_pllinit_r0(struct ssb_chipcommon *cc, cc 224 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 233 drivers/ssb/driver_chipcommon_pmu.c cc->pmu.crystalfreq = 20000; cc 243 drivers/ssb/driver_chipcommon_pmu.c cc->pmu.crystalfreq = e->freq; cc 246 drivers/ssb/driver_chipcommon_pmu.c pmuctl = chipco_read32(cc, SSB_CHIPCO_PMU_CTL); cc 252 drivers/ssb/driver_chipcommon_pmu.c dev_info(cc->dev->dev, "Programming PLL to %u.%03u MHz\n", cc 258 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MINRES_MSK, cc 261 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MAXRES_MSK, cc 271 
drivers/ssb/driver_chipcommon_pmu.c tmp = chipco_read32(cc, SSB_CHIPCO_CLKCTLST); cc 276 drivers/ssb/driver_chipcommon_pmu.c tmp = chipco_read32(cc, SSB_CHIPCO_CLKCTLST); cc 278 drivers/ssb/driver_chipcommon_pmu.c dev_emerg(cc->dev->dev, "Failed to turn the PLL off!\n"); cc 281 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU1_PLLCTL0); cc 285 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL0, pllctl); cc 288 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU1_PLLCTL2); cc 292 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL2, pllctl); cc 295 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU1_PLLCTL3); cc 298 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL3, pllctl); cc 302 drivers/ssb/driver_chipcommon_pmu.c pllctl = ssb_chipco_pll_read(cc, SSB_PMU1_PLLCTL5); cc 305 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL5, pllctl); cc 309 drivers/ssb/driver_chipcommon_pmu.c pmuctl = chipco_read32(cc, SSB_CHIPCO_PMU_CTL); cc 314 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_CTL, pmuctl); cc 317 drivers/ssb/driver_chipcommon_pmu.c static void ssb_pmu_pll_init(struct ssb_chipcommon *cc) cc 319 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 333 drivers/ssb/driver_chipcommon_pmu.c ssb_pmu1_pllinit_r0(cc, crystalfreq); cc 336 drivers/ssb/driver_chipcommon_pmu.c ssb_pmu0_pllinit_r0(cc, crystalfreq); cc 341 drivers/ssb/driver_chipcommon_pmu.c ssb_pmu0_pllinit_r0(cc, crystalfreq); cc 344 drivers/ssb/driver_chipcommon_pmu.c if (cc->pmu.rev == 2) { cc 345 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PLLCTL_ADDR, 0x0000000A); cc 346 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PLLCTL_DATA, 0x380005C0); cc 352 drivers/ssb/driver_chipcommon_pmu.c dev_err(cc->dev->dev, "ERROR: PLL init unknown for device %04X\n", cc 423 drivers/ssb/driver_chipcommon_pmu.c static void ssb_pmu_resources_init(struct ssb_chipcommon *cc) cc 425 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 448 drivers/ssb/driver_chipcommon_pmu.c if (chipco_read32(cc, SSB_CHIPCO_CHIPSTAT) & cc 474 drivers/ssb/driver_chipcommon_pmu.c dev_err(cc->dev->dev, "ERROR: PMU resource config unknown for device %04X\n", cc 480 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_RES_TABSEL, cc 482 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_RES_UPDNTM, cc 488 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_RES_TABSEL, cc 492 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_RES_DEPMSK, cc 496 drivers/ssb/driver_chipcommon_pmu.c chipco_set32(cc, SSB_CHIPCO_PMU_RES_DEPMSK, cc 500 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_RES_DEPMSK, cc 511 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_MINRES_MSK, min_msk); cc 513 drivers/ssb/driver_chipcommon_pmu.c chipco_write32(cc, SSB_CHIPCO_PMU_MAXRES_MSK, max_msk); cc 517 drivers/ssb/driver_chipcommon_pmu.c void ssb_pmu_init(struct ssb_chipcommon *cc) cc 521 drivers/ssb/driver_chipcommon_pmu.c if (!(cc->capabilities & SSB_CHIPCO_CAP_PMU)) cc 524 drivers/ssb/driver_chipcommon_pmu.c pmucap = chipco_read32(cc, SSB_CHIPCO_PMU_CAP); cc 525 drivers/ssb/driver_chipcommon_pmu.c cc->pmu.rev = (pmucap & SSB_CHIPCO_PMU_CAP_REVISION); cc 527 drivers/ssb/driver_chipcommon_pmu.c 
dev_dbg(cc->dev->dev, "Found rev %u PMU (capabilities 0x%08X)\n", cc 528 drivers/ssb/driver_chipcommon_pmu.c cc->pmu.rev, pmucap); cc 530 drivers/ssb/driver_chipcommon_pmu.c if (cc->pmu.rev == 1) cc 531 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_CTL, cc 534 drivers/ssb/driver_chipcommon_pmu.c chipco_set32(cc, SSB_CHIPCO_PMU_CTL, cc 536 drivers/ssb/driver_chipcommon_pmu.c ssb_pmu_pll_init(cc); cc 537 drivers/ssb/driver_chipcommon_pmu.c ssb_pmu_resources_init(cc); cc 540 drivers/ssb/driver_chipcommon_pmu.c void ssb_pmu_set_ldo_voltage(struct ssb_chipcommon *cc, cc 543 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 586 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_regctl_maskset(cc, addr, ~(mask << shift), cc 590 drivers/ssb/driver_chipcommon_pmu.c void ssb_pmu_set_ldo_paref(struct ssb_chipcommon *cc, bool on) cc 592 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 610 drivers/ssb/driver_chipcommon_pmu.c chipco_set32(cc, SSB_CHIPCO_PMU_MINRES_MSK, 1 << ldo); cc 612 drivers/ssb/driver_chipcommon_pmu.c chipco_mask32(cc, SSB_CHIPCO_PMU_MINRES_MSK, ~(1 << ldo)); cc 613 drivers/ssb/driver_chipcommon_pmu.c chipco_read32(cc, SSB_CHIPCO_PMU_MINRES_MSK); //SPEC FIXME found via mmiotrace - dummy read? cc 619 drivers/ssb/driver_chipcommon_pmu.c static u32 ssb_pmu_get_alp_clock_clk0(struct ssb_chipcommon *cc) cc 624 drivers/ssb/driver_chipcommon_pmu.c crystalfreq = (chipco_read32(cc, SSB_CHIPCO_PMU_CTL) & cc 631 drivers/ssb/driver_chipcommon_pmu.c u32 ssb_pmu_get_alp_clock(struct ssb_chipcommon *cc) cc 633 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 637 drivers/ssb/driver_chipcommon_pmu.c return ssb_pmu_get_alp_clock_clk0(cc); cc 639 drivers/ssb/driver_chipcommon_pmu.c dev_err(cc->dev->dev, "ERROR: PMU alp clock unknown for device %04X\n", cc 645 drivers/ssb/driver_chipcommon_pmu.c u32 ssb_pmu_get_cpu_clock(struct ssb_chipcommon *cc) cc 647 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 654 drivers/ssb/driver_chipcommon_pmu.c dev_err(cc->dev->dev, "ERROR: PMU cpu clock unknown for device %04X\n", cc 660 drivers/ssb/driver_chipcommon_pmu.c u32 ssb_pmu_get_controlclock(struct ssb_chipcommon *cc) cc 662 drivers/ssb/driver_chipcommon_pmu.c struct ssb_bus *bus = cc->dev->bus; cc 668 drivers/ssb/driver_chipcommon_pmu.c dev_err(cc->dev->dev, "ERROR: PMU controlclock unknown for device %04X\n", cc 674 drivers/ssb/driver_chipcommon_pmu.c void ssb_pmu_spuravoid_pllupdate(struct ssb_chipcommon *cc, int spuravoid) cc 678 drivers/ssb/driver_chipcommon_pmu.c switch (cc->dev->bus->chip_id) { cc 680 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL0, 0x11100070); cc 681 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL1, 0x1014140a); cc 682 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL5, 0x88888854); cc 684 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL2, 0x05201828); cc 686 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL2, 0x05001828); cc 691 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL0, 0x11500008); cc 692 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL1, 0x0C000C06); cc 693 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL2, 0x0F600a08); cc 694 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL3, 0x00000000); cc 695 
drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL4, 0x2001E920); cc 696 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL5, 0x88888815); cc 698 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL0, 0x11100008); cc 699 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL1, 0x0c000c06); cc 700 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL2, 0x03000a08); cc 701 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL3, 0x00000000); cc 702 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL4, 0x200005c0); cc 703 drivers/ssb/driver_chipcommon_pmu.c ssb_chipco_pll_write(cc, SSB_PMU1_PLLCTL5, 0x88888855); cc 708 drivers/ssb/driver_chipcommon_pmu.c dev_err(cc->dev->dev, cc 710 drivers/ssb/driver_chipcommon_pmu.c cc->dev->bus->chip_id); cc 714 drivers/ssb/driver_chipcommon_pmu.c chipco_set32(cc, SSB_CHIPCO_PMU_CTL, pmu_ctl); cc 72 drivers/ssb/driver_chipcommon_sflash.c static void ssb_sflash_cmd(struct ssb_chipcommon *cc, u32 opcode) cc 75 drivers/ssb/driver_chipcommon_sflash.c chipco_write32(cc, SSB_CHIPCO_FLASHCTL, cc 78 drivers/ssb/driver_chipcommon_sflash.c if (!(chipco_read32(cc, SSB_CHIPCO_FLASHCTL) & cc 83 drivers/ssb/driver_chipcommon_sflash.c dev_err(cc->dev->dev, "SFLASH control command failed (timeout)!\n"); cc 87 drivers/ssb/driver_chipcommon_sflash.c int ssb_sflash_init(struct ssb_chipcommon *cc) cc 89 drivers/ssb/driver_chipcommon_sflash.c struct ssb_sflash *sflash = &cc->dev->bus->mipscore.sflash; cc 93 drivers/ssb/driver_chipcommon_sflash.c switch (cc->capabilities & SSB_CHIPCO_CAP_FLASHT) { cc 95 drivers/ssb/driver_chipcommon_sflash.c ssb_sflash_cmd(cc, SSB_CHIPCO_FLASHCTL_ST_DP); cc 97 drivers/ssb/driver_chipcommon_sflash.c chipco_write32(cc, SSB_CHIPCO_FLASHADDR, 0); cc 98 drivers/ssb/driver_chipcommon_sflash.c ssb_sflash_cmd(cc, SSB_CHIPCO_FLASHCTL_ST_RES); cc 99 drivers/ssb/driver_chipcommon_sflash.c id = chipco_read32(cc, SSB_CHIPCO_FLASHDATA); cc 101 drivers/ssb/driver_chipcommon_sflash.c chipco_write32(cc, SSB_CHIPCO_FLASHADDR, 1); cc 102 drivers/ssb/driver_chipcommon_sflash.c ssb_sflash_cmd(cc, SSB_CHIPCO_FLASHCTL_ST_RES); cc 103 drivers/ssb/driver_chipcommon_sflash.c id2 = chipco_read32(cc, SSB_CHIPCO_FLASHDATA); cc 129 drivers/ssb/driver_chipcommon_sflash.c ssb_sflash_cmd(cc, SSB_CHIPCO_FLASHCTL_AT_STATUS); cc 130 drivers/ssb/driver_chipcommon_sflash.c id = chipco_read32(cc, SSB_CHIPCO_FLASHDATA) & 0x3c; cc 1151 drivers/ssb/main.c struct ssb_chipcommon *cc; cc 1160 drivers/ssb/main.c cc = &bus->chipco; cc 1162 drivers/ssb/main.c if (!cc->dev) cc 1164 drivers/ssb/main.c if (cc->dev->id.revision < 5) cc 1167 drivers/ssb/main.c ssb_chipco_set_clockmode(cc, SSB_CLKMODE_SLOW); cc 276 drivers/ssb/scan.c u32 idhi, cc, rev, tmp; cc 291 drivers/ssb/scan.c cc = (idhi & SSB_IDHIGH_CC) >> SSB_IDHIGH_CC_SHIFT; cc 296 drivers/ssb/scan.c if (cc == SSB_DEV_CHIPCOMMON) { cc 192 drivers/ssb/ssb_private.h extern u32 ssb_pmu_get_cpu_clock(struct ssb_chipcommon *cc); cc 193 drivers/ssb/ssb_private.h extern u32 ssb_pmu_get_controlclock(struct ssb_chipcommon *cc); cc 194 drivers/ssb/ssb_private.h extern u32 ssb_pmu_get_alp_clock(struct ssb_chipcommon *cc); cc 202 drivers/ssb/ssb_private.h int ssb_sflash_init(struct ssb_chipcommon *cc); cc 204 drivers/ssb/ssb_private.h static inline int ssb_sflash_init(struct ssb_chipcommon *cc) cc 160 drivers/staging/media/imx/imx-ic-prp.c const struct imx_media_pixfmt *cc; cc 182 
drivers/staging/media/imx/imx-ic-prp.c cc = imx_media_find_ipu_format(sdformat->format.code, cc 184 drivers/staging/media/imx/imx-ic-prp.c if (!cc) { cc 186 drivers/staging/media/imx/imx-ic-prp.c cc = imx_media_find_ipu_format(code, CS_SEL_ANY); cc 187 drivers/staging/media/imx/imx-ic-prp.c sdformat->format.code = cc->codes[0]; cc 79 drivers/staging/media/imx/imx-ic-prpencvf.c const struct imx_media_pixfmt *cc[PRPENCVF_NUM_PADS]; cc 360 drivers/staging/media/imx/imx-ic-prpencvf.c outcc = vdev->cc; cc 461 drivers/staging/media/imx/imx-ic-prpencvf.c incc = priv->cc[PRPENCVF_SINK_PAD]; cc 462 drivers/staging/media/imx/imx-ic-prpencvf.c outcc = vdev->cc; cc 590 drivers/staging/media/imx/imx-ic-prpencvf.c incc = priv->cc[PRPENCVF_SINK_PAD]; cc 591 drivers/staging/media/imx/imx-ic-prpencvf.c outcc = vdev->cc; cc 884 drivers/staging/media/imx/imx-ic-prpencvf.c const struct imx_media_pixfmt **cc) cc 888 drivers/staging/media/imx/imx-ic-prpencvf.c *cc = imx_media_find_ipu_format(sdformat->format.code, CS_SEL_ANY); cc 889 drivers/staging/media/imx/imx-ic-prpencvf.c if (!*cc) { cc 893 drivers/staging/media/imx/imx-ic-prpencvf.c *cc = imx_media_find_ipu_format(code, CS_SEL_ANY); cc 894 drivers/staging/media/imx/imx-ic-prpencvf.c sdformat->format.code = (*cc)->codes[0]; cc 927 drivers/staging/media/imx/imx-ic-prpencvf.c const struct imx_media_pixfmt *cc; cc 941 drivers/staging/media/imx/imx-ic-prpencvf.c prp_try_fmt(priv, cfg, sdformat, &cc); cc 961 drivers/staging/media/imx/imx-ic-prpencvf.c priv->cc[PRPENCVF_SRC_PAD] = outcc; cc 965 drivers/staging/media/imx/imx-ic-prpencvf.c priv->cc[sdformat->pad] = cc; cc 978 drivers/staging/media/imx/imx-ic-prpencvf.c const struct imx_media_pixfmt *cc; cc 991 drivers/staging/media/imx/imx-ic-prpencvf.c prp_try_fmt(priv, cfg, &format, &cc); cc 1003 drivers/staging/media/imx/imx-ic-prpencvf.c prp_try_fmt(priv, cfg, &format, &cc); cc 1260 drivers/staging/media/imx/imx-ic-prpencvf.c &priv->cc[i]); cc 86 drivers/staging/media/imx/imx-media-capture.c const struct imx_media_pixfmt *cc; cc 94 drivers/staging/media/imx/imx-media-capture.c cc = imx_media_find_format(fsize->pixel_format, CS_SEL_ANY, true); cc 95 drivers/staging/media/imx/imx-media-capture.c if (!cc) cc 98 drivers/staging/media/imx/imx-media-capture.c fse.code = cc->codes[0]; cc 126 drivers/staging/media/imx/imx-media-capture.c const struct imx_media_pixfmt *cc; cc 136 drivers/staging/media/imx/imx-media-capture.c cc = imx_media_find_format(fival->pixel_format, CS_SEL_ANY, true); cc 137 drivers/staging/media/imx/imx-media-capture.c if (!cc) cc 140 drivers/staging/media/imx/imx-media-capture.c fie.code = cc->codes[0]; cc 210 drivers/staging/media/imx/imx-media-capture.c const struct imx_media_pixfmt *cc, *cc_src; cc 220 drivers/staging/media/imx/imx-media-capture.c cc = imx_media_find_format(fourcc, cs_sel, false); cc 221 drivers/staging/media/imx/imx-media-capture.c if (!cc) { cc 223 drivers/staging/media/imx/imx-media-capture.c cc = imx_media_find_format(fourcc, cs_sel, false); cc 231 drivers/staging/media/imx/imx-media-capture.c cc = cc_src; cc 248 drivers/staging/media/imx/imx-media-capture.c imx_media_mbus_fmt_to_pix_fmt(&f->fmt.pix, &fmt_src->format, cc); cc 251 drivers/staging/media/imx/imx-media-capture.c *retcc = cc; cc 297 drivers/staging/media/imx/imx-media-capture.c ret = __capture_try_fmt_vid_cap(priv, &fmt_src, f, &priv->vdev.cc, cc 535 drivers/staging/media/imx/imx-media-capture.c const struct imx_media_pixfmt *cc; cc 548 drivers/staging/media/imx/imx-media-capture.c ret = 
__capture_try_fmt_vid_cap(priv, &fmt_src, &f, &cc, &compose); cc 554 drivers/staging/media/imx/imx-media-capture.c priv->vdev.cc->cs != cc->cs || cc 799 drivers/staging/media/imx/imx-media-capture.c vdev->cc = imx_media_find_format(vdev->fmt.fmt.pix.pixelformat, cc 77 drivers/staging/media/imx/imx-media-csi.c const struct imx_media_pixfmt *cc[CSI_NUM_PADS]; cc 420 drivers/staging/media/imx/imx-media-csi.c incc = priv->cc[CSI_SINK_PAD]; cc 714 drivers/staging/media/imx/imx-media-csi.c incc = priv->cc[CSI_SINK_PAD]; cc 1424 drivers/staging/media/imx/imx-media-csi.c const struct imx_media_pixfmt **cc) cc 1443 drivers/staging/media/imx/imx-media-csi.c *cc = incc; cc 1448 drivers/staging/media/imx/imx-media-csi.c *cc = imx_media_find_ipu_format(sdformat->format.code, cc 1450 drivers/staging/media/imx/imx-media-csi.c if (!*cc) { cc 1452 drivers/staging/media/imx/imx-media-csi.c *cc = imx_media_find_ipu_format(code, cs_sel); cc 1453 drivers/staging/media/imx/imx-media-csi.c sdformat->format.code = (*cc)->codes[0]; cc 1469 drivers/staging/media/imx/imx-media-csi.c *cc = imx_media_find_mbus_format(sdformat->format.code, cc 1471 drivers/staging/media/imx/imx-media-csi.c if (!*cc) { cc 1474 drivers/staging/media/imx/imx-media-csi.c *cc = imx_media_find_mbus_format(code, cc 1476 drivers/staging/media/imx/imx-media-csi.c sdformat->format.code = (*cc)->codes[0]; cc 1507 drivers/staging/media/imx/imx-media-csi.c const struct imx_media_pixfmt *cc; cc 1531 drivers/staging/media/imx/imx-media-csi.c csi_try_fmt(priv, &upstream_ep, cfg, sdformat, crop, compose, &cc); cc 1555 drivers/staging/media/imx/imx-media-csi.c priv->cc[pad] = outcc; cc 1560 drivers/staging/media/imx/imx-media-csi.c priv->cc[sdformat->pad] = cc; cc 1772 drivers/staging/media/imx/imx-media-csi.c &priv->cc[i]); cc 459 drivers/staging/media/imx/imx-media-utils.c const struct imx_media_pixfmt **cc) cc 477 drivers/staging/media/imx/imx-media-utils.c if (cc) cc 478 drivers/staging/media/imx/imx-media-utils.c *cc = lcc; cc 527 drivers/staging/media/imx/imx-media-utils.c const struct imx_media_pixfmt *cc; cc 530 drivers/staging/media/imx/imx-media-utils.c cc = imx_media_find_mbus_format(tryfmt->code, CS_SEL_ANY, true); cc 531 drivers/staging/media/imx/imx-media-utils.c if (!cc) cc 532 drivers/staging/media/imx/imx-media-utils.c cc = imx_media_find_ipu_format(tryfmt->code, CS_SEL_ANY); cc 533 drivers/staging/media/imx/imx-media-utils.c if (cc && cc->cs == IPUV3_COLORSPACE_RGB) cc 576 drivers/staging/media/imx/imx-media-utils.c const struct imx_media_pixfmt *cc) cc 581 drivers/staging/media/imx/imx-media-utils.c if (!cc) { cc 582 drivers/staging/media/imx/imx-media-utils.c cc = imx_media_find_ipu_format(mbus->code, CS_SEL_ANY); cc 583 drivers/staging/media/imx/imx-media-utils.c if (!cc) cc 584 drivers/staging/media/imx/imx-media-utils.c cc = imx_media_find_mbus_format(mbus->code, CS_SEL_ANY, cc 586 drivers/staging/media/imx/imx-media-utils.c if (!cc) cc 594 drivers/staging/media/imx/imx-media-utils.c if (cc->ipufmt && cc->cs == IPUV3_COLORSPACE_YUV) { cc 598 drivers/staging/media/imx/imx-media-utils.c cc = imx_media_find_mbus_format(code, CS_SEL_YUV, false); cc 605 drivers/staging/media/imx/imx-media-utils.c if (cc->planar) cc 608 drivers/staging/media/imx/imx-media-utils.c stride = round_up((width * cc->bpp) >> 3, 8); cc 612 drivers/staging/media/imx/imx-media-utils.c pix->pixelformat = cc->fourcc; cc 619 drivers/staging/media/imx/imx-media-utils.c pix->sizeimage = cc->planar ? 
((stride * pix->height * cc->bpp) >> 3) : cc 100 drivers/staging/media/imx/imx-media-vdic.c const struct imx_media_pixfmt *cc[VDIC_NUM_PADS]; cc 301 drivers/staging/media/imx/imx-media-vdic.c incc = priv->cc[VDIC_SINK_PAD_IDMAC]; cc 582 drivers/staging/media/imx/imx-media-vdic.c const struct imx_media_pixfmt **cc) cc 586 drivers/staging/media/imx/imx-media-vdic.c *cc = imx_media_find_ipu_format(sdformat->format.code, CS_SEL_YUV); cc 587 drivers/staging/media/imx/imx-media-vdic.c if (!*cc) { cc 591 drivers/staging/media/imx/imx-media-vdic.c *cc = imx_media_find_ipu_format(code, CS_SEL_YUV); cc 592 drivers/staging/media/imx/imx-media-vdic.c sdformat->format.code = (*cc)->codes[0]; cc 625 drivers/staging/media/imx/imx-media-vdic.c const struct imx_media_pixfmt *cc; cc 639 drivers/staging/media/imx/imx-media-vdic.c vdic_try_fmt(priv, cfg, sdformat, &cc); cc 660 drivers/staging/media/imx/imx-media-vdic.c priv->cc[VDIC_SRC_PAD_DIRECT] = outcc; cc 664 drivers/staging/media/imx/imx-media-vdic.c priv->cc[sdformat->pad] = cc; cc 864 drivers/staging/media/imx/imx-media-vdic.c &priv->cc[i]); cc 94 drivers/staging/media/imx/imx-media.h const struct imx_media_pixfmt *cc; cc 172 drivers/staging/media/imx/imx-media.h const struct imx_media_pixfmt **cc); cc 179 drivers/staging/media/imx/imx-media.h const struct imx_media_pixfmt *cc); cc 174 drivers/staging/media/imx/imx7-media-csi.c const struct imx_media_pixfmt *cc[IMX7_CSI_PADS_NUM]; cc 993 drivers/staging/media/imx/imx7-media-csi.c const struct imx_media_pixfmt **cc) cc 1013 drivers/staging/media/imx/imx7-media-csi.c *cc = in_cc; cc 1019 drivers/staging/media/imx/imx7-media-csi.c *cc = imx_media_find_mbus_format(sdformat->format.code, cc 1021 drivers/staging/media/imx/imx7-media-csi.c if (!*cc) { cc 1023 drivers/staging/media/imx/imx7-media-csi.c *cc = imx_media_find_mbus_format(code, CS_SEL_ANY, cc 1025 drivers/staging/media/imx/imx7-media-csi.c sdformat->format.code = (*cc)->codes[0]; cc 1047 drivers/staging/media/imx/imx7-media-csi.c const struct imx_media_pixfmt *cc; cc 1062 drivers/staging/media/imx/imx7-media-csi.c ret = imx7_csi_try_fmt(csi, cfg, sdformat, &cc); cc 1088 drivers/staging/media/imx/imx7-media-csi.c csi->cc[IMX7_CSI_PAD_SRC] = outcc; cc 1092 drivers/staging/media/imx/imx7-media-csi.c csi->cc[sdformat->pad] = cc; cc 1113 drivers/staging/media/imx/imx7-media-csi.c &csi->cc[i]); cc 1148 drivers/staging/media/imx/imx7-media-csi.c &csi->cc[i]); cc 28 drivers/staging/most/cdev/cdev.c struct core_component cc; cc 53 drivers/staging/most/cdev/cdev.c return channel_has_mbo(c->iface, c->channel_id, &comp.cc) > 0; cc 59 drivers/staging/most/cdev/cdev.c *mbo = most_get_mbo(c->iface, c->channel_id, &comp.cc); cc 91 drivers/staging/most/cdev/cdev.c most_stop_channel(c->iface, c->channel_id, &comp.cc); cc 150 drivers/staging/most/cdev/cdev.c ret = most_start_channel(c->iface, c->channel_id, &comp.cc); cc 496 drivers/staging/most/cdev/cdev.c .cc = { cc 525 drivers/staging/most/cdev/cdev.c err = most_register_component(&comp.cc); cc 528 drivers/staging/most/cdev/cdev.c err = most_register_configfs_subsys(&comp.cc); cc 534 drivers/staging/most/cdev/cdev.c most_deregister_component(&comp.cc); cc 549 drivers/staging/most/cdev/cdev.c most_deregister_configfs_subsys(&comp.cc); cc 550 drivers/staging/most/cdev/cdev.c most_deregister_component(&comp.cc); cc 1147 drivers/staging/rts5208/sd.c u16 cc = ((u16)buf[0] << 8) | buf[1]; cc 1150 drivers/staging/rts5208/sd.c cc); cc 1151 drivers/staging/rts5208/sd.c if ((cc == 0) || (cc > 800)) cc 1159 
drivers/staging/rts5208/sd.c if ((cc > 400) || (func_to_switch > CURRENT_LIMIT_400)) { cc 1609 drivers/staging/speakup/main.c int cc; cc 1629 drivers/staging/speakup/main.c cc = 0; cc 1632 drivers/staging/speakup/main.c cc++; cc 1633 drivers/staging/speakup/main.c return cc; cc 1123 drivers/staging/unisys/visornic/visornic_main.c int i = 0, cc, numreposted; cc 1142 drivers/staging/unisys/visornic/visornic_main.c for (cc = 0, numreposted = 0; cc < copy.numrcvbufs; cc++) { cc 1144 drivers/staging/unisys/visornic/visornic_main.c if (devdata->rcvbuf[i] != copy.rcvbuf[cc]) cc 1197 drivers/staging/unisys/visornic/visornic_main.c int cc, currsize, off; cc 1293 drivers/staging/unisys/visornic/visornic_main.c for (cc = 1, prev = NULL; cc 1294 drivers/staging/unisys/visornic/visornic_main.c cc < cmdrsp->net.rcv.numrcvbufs; cc++) { cc 1295 drivers/staging/unisys/visornic/visornic_main.c curr = (struct sk_buff *)cmdrsp->net.rcv.rcvbuf[cc]; cc 734 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c struct vchiq_io_copy_callback_context *cc = context; cc 739 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (!cc->elements_to_go) cc 742 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (!cc->element->size) { cc 743 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->elements_to_go--; cc 744 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->element++; cc 745 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->element_offset = 0; cc 749 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c bytes_this_round = min(cc->element->size - cc->element_offset, cc 753 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->element->data + cc->element_offset, cc 757 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->element_offset += bytes_this_round; cc 760 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c if (cc->element_offset == cc->element->size) { cc 761 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->elements_to_go--; cc 762 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->element++; cc 763 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_arm.c cc->element_offset = 0; cc 836 drivers/tty/vt/vt_ioctl.c ushort ll,cc; cc 840 drivers/tty/vt/vt_ioctl.c get_user(cc, &vtsizes->v_cols)) cc 850 drivers/tty/vt/vt_ioctl.c vc_resize(vc_cons[i].d, cc, ll); cc 217 drivers/usb/host/fhci-q.c u32 cc = td->status; cc 223 drivers/usb/host/fhci-q.c cc == USB_TD_RX_DATA_UNDERUN)) cc 224 drivers/usb/host/fhci-q.c cc = USB_TD_OK; cc 234 drivers/usb/host/fhci-q.c status_to_error(cc); cc 249 drivers/usb/host/fhci-q.c cc == USB_TD_RX_DATA_UNDERUN) { cc 251 drivers/usb/host/fhci-q.c cc = USB_TD_OK; cc 253 drivers/usb/host/fhci-q.c if (cc != USB_TD_OK) { cc 255 drivers/usb/host/fhci-q.c urb->status = status_to_error(cc); cc 31 drivers/usb/host/imx21-dbg.c int frame, struct td *td, int cc, int len) {} cc 132 drivers/usb/host/imx21-dbg.c int frame, struct td *td, int cc, int len) cc 143 drivers/usb/host/imx21-dbg.c trace->cc = cc; cc 150 drivers/usb/host/imx21-dbg.c if (found && cc) { cc 387 drivers/usb/host/imx21-dbg.c trace->cc, cc 630 drivers/usb/host/imx21-hcd.c int cc; cc 635 drivers/usb/host/imx21-hcd.c cc = (etd_readl(imx21, etd_num, 3) >> DW3_COMPCODE0) & 0xf; cc 641 drivers/usb/host/imx21-hcd.c if (dir_in && (cc == TD_DATAUNDERRUN)) cc 642 drivers/usb/host/imx21-hcd.c cc = TD_CC_NOERROR; cc 644 drivers/usb/host/imx21-hcd.c if (cc == TD_NOTACCESSED) cc 648 
drivers/usb/host/imx21-hcd.c imx21_hc_get_frame(hcd), td, cc, bytes_xfrd); cc 649 drivers/usb/host/imx21-hcd.c if (cc) { cc 654 drivers/usb/host/imx21-hcd.c cc, imx21_hc_get_frame(hcd), td->frame, cc 668 drivers/usb/host/imx21-hcd.c urb->iso_frame_desc[isoc_index].status = cc_to_error[cc]; cc 1038 drivers/usb/host/imx21-hcd.c int cc; cc 1045 drivers/usb/host/imx21-hcd.c cc = (etd_readl(imx21, etd_num, 2) >> DW2_COMPCODE) & 0xf; cc 1074 drivers/usb/host/imx21-hcd.c && (cc == TD_DATAUNDERRUN)) cc 1075 drivers/usb/host/imx21-hcd.c cc = TD_CC_NOERROR; cc 1077 drivers/usb/host/imx21-hcd.c if (cc != 0) cc 1078 drivers/usb/host/imx21-hcd.c dev_vdbg(imx21->dev, "cc is 0x%x\n", cc); cc 1080 drivers/usb/host/imx21-hcd.c etd_done = (cc_to_error[cc] != 0); /* stop if error */ cc 1128 drivers/usb/host/imx21-hcd.c nonisoc_urb_completed_for_etd(imx21, etd, cc_to_error[cc]); cc 1335 drivers/usb/host/imx21-hcd.c int cc; cc 1343 drivers/usb/host/imx21-hcd.c cc = etd_readl(imx21, etd_num, 2) >> DW2_COMPCODE; cc 1344 drivers/usb/host/imx21-hcd.c if (cc == TD_NOTACCESSED) cc 1355 drivers/usb/host/imx21-hcd.c cc); cc 400 drivers/usb/host/imx21-hcd.h int cc; cc 356 drivers/usb/host/isp116x-hcd.c u8 cc; cc 364 drivers/usb/host/isp116x-hcd.c cc = PTD_GET_CC(ptd); cc 373 drivers/usb/host/isp116x-hcd.c if (cc == TD_DATAUNDERRUN) { cc 377 drivers/usb/host/isp116x-hcd.c cc = TD_CC_NOERROR; cc 390 drivers/usb/host/isp116x-hcd.c if (cc != TD_CC_NOERROR && cc != TD_NOTACCESSED cc 391 drivers/usb/host/isp116x-hcd.c && (++ep->error_count >= 3 || cc == TD_CC_STALL cc 392 drivers/usb/host/isp116x-hcd.c || cc == TD_DATAOVERRUN)) { cc 393 drivers/usb/host/isp116x-hcd.c status = cc_to_error[cc]; cc 409 drivers/usb/host/isp116x-hcd.c && (cc == TD_CC_NOERROR || cc == TD_NOTACCESSED)) cc 426 drivers/usb/host/isp116x-hcd.c || (cc != TD_CC_NOERROR && cc < 0x0E)) cc 447 drivers/usb/host/isp116x-hcd.c || (cc != TD_CC_NOERROR && cc < 0x0E)) cc 461 drivers/usb/host/isp116x-hcd.c || (cc != TD_CC_NOERROR && cc < 0x0E)) cc 507 drivers/usb/host/isp1362-hcd.c u8 cc; cc 513 drivers/usb/host/isp1362-hcd.c cc = PTD_GET_CC(ptd); cc 514 drivers/usb/host/isp1362-hcd.c if (cc == PTD_NOTACCESSED) { cc 517 drivers/usb/host/isp1362-hcd.c cc = PTD_DEVNOTRESP; cc 529 drivers/usb/host/isp1362-hcd.c if (cc == PTD_DATAUNDERRUN) { cc 534 drivers/usb/host/isp1362-hcd.c cc = PTD_CC_NOERROR; cc 561 drivers/usb/host/isp1362-hcd.c if (cc != PTD_CC_NOERROR) { cc 562 drivers/usb/host/isp1362-hcd.c if (++ep->error_count >= 3 || cc == PTD_CC_STALL || cc == PTD_DATAOVERRUN) { cc 563 drivers/usb/host/isp1362-hcd.c urbstat = cc_to_error[cc]; cc 565 drivers/usb/host/isp1362-hcd.c __func__, ep->num_req, ep->nextpid, urbstat, cc, cc 301 drivers/usb/host/ohci-dbg.c int cc = (psw >> 12) & 0x0f; cc 303 drivers/usb/host/ohci-dbg.c psw, cc, cc 304 drivers/usb/host/ohci-dbg.c (cc >= 0x0e) ? 
"OFFSET" : "SIZE", cc 759 drivers/usb/host/ohci-q.c int cc = 0; cc 773 drivers/usb/host/ohci-q.c cc = (tdPSW >> 12) & 0xF; cc 781 drivers/usb/host/ohci-q.c if (cc == TD_DATAUNDERRUN) cc 782 drivers/usb/host/ohci-q.c cc = TD_CC_NOERROR; cc 787 drivers/usb/host/ohci-q.c urb->iso_frame_desc [td->index].status = cc_to_error [cc]; cc 789 drivers/usb/host/ohci-q.c if (cc != TD_CC_NOERROR) cc 792 drivers/usb/host/ohci-q.c urb, td, 1 + td->index, dlen, cc); cc 802 drivers/usb/host/ohci-q.c cc = TD_CC_GET (tdINFO); cc 805 drivers/usb/host/ohci-q.c if (cc == TD_DATAUNDERRUN cc 807 drivers/usb/host/ohci-q.c cc = TD_CC_NOERROR; cc 808 drivers/usb/host/ohci-q.c if (cc != TD_CC_NOERROR && cc < 0x0E) cc 809 drivers/usb/host/ohci-q.c status = cc_to_error[cc]; cc 821 drivers/usb/host/ohci-q.c if (cc != TD_CC_NOERROR && cc < 0x0E) cc 824 drivers/usb/host/ohci-q.c urb, td, 1 + td->index, cc, cc 833 drivers/usb/host/ohci-q.c static void ed_halted(struct ohci_hcd *ohci, struct td *td, int cc) cc 878 drivers/usb/host/ohci-q.c switch (cc) { cc 894 drivers/usb/host/ohci-q.c cc, cc_to_error [cc]); cc 948 drivers/usb/host/ohci-q.c int cc; cc 957 drivers/usb/host/ohci-q.c cc = TD_CC_GET (hc32_to_cpup (ohci, &td->hwINFO)); cc 963 drivers/usb/host/ohci-q.c if (cc != TD_CC_NOERROR cc 965 drivers/usb/host/ohci-q.c ed_halted(ohci, td, cc); cc 597 drivers/usb/typec/tcpm/fusb302.c static int tcpm_set_cc(struct tcpc_dev *dev, enum typec_cc_status cc) cc 609 drivers/usb/typec/tcpm/fusb302.c switch (cc) { cc 625 drivers/usb/typec/tcpm/fusb302.c typec_cc_status_name[cc]); cc 630 drivers/usb/typec/tcpm/fusb302.c fusb302_log(chip, "cc := %s", typec_cc_status_name[cc]); cc 649 drivers/usb/typec/tcpm/fusb302.c ret = fusb302_set_src_current(chip, cc_src_current[cc]); cc 652 drivers/usb/typec/tcpm/fusb302.c typec_cc_status_name[cc], ret); cc 657 drivers/usb/typec/tcpm/fusb302.c switch (cc) { cc 661 drivers/usb/typec/tcpm/fusb302.c rd_mda = rd_mda_value[cc_src_current[cc]]; cc 914 drivers/usb/typec/tcpm/fusb302.c enum typec_cc_status cc) cc 934 drivers/usb/typec/tcpm/fusb302.c ret = fusb302_set_src_current(chip, cc_src_current[cc]); cc 937 drivers/usb/typec/tcpm/fusb302.c typec_cc_status_name[cc], ret); cc 1236 drivers/usb/typec/tcpm/fusb302.c enum typec_cc_status *cc) cc 1266 drivers/usb/typec/tcpm/fusb302.c *cc = TYPEC_CC_OPEN; cc 1282 drivers/usb/typec/tcpm/fusb302.c *cc = TYPEC_CC_RD; cc 1284 drivers/usb/typec/tcpm/fusb302.c *cc = TYPEC_CC_RA; cc 56 drivers/usb/typec/tcpm/tcpci.c static int tcpci_set_cc(struct tcpc_dev *tcpc, enum typec_cc_status cc) cc 62 drivers/usb/typec/tcpm/tcpci.c switch (cc) { cc 105 drivers/usb/typec/tcpm/tcpci.c enum typec_cc_status cc) cc 116 drivers/usb/typec/tcpm/tcpci.c ret = tcpci->data->start_drp_toggling(tcpci, tcpci->data, cc); cc 121 drivers/usb/typec/tcpm/tcpci.c switch (cc) { cc 137 drivers/usb/typec/tcpm/tcpci.c if (cc == TYPEC_CC_RD) cc 150 drivers/usb/typec/tcpm/tcpci.c static enum typec_cc_status tcpci_to_typec_cc(unsigned int cc, bool sink) cc 152 drivers/usb/typec/tcpm/tcpci.c switch (cc) { cc 132 drivers/usb/typec/tcpm/tcpci.h enum typec_cc_status cc); cc 118 drivers/usb/typec/tcpm/tcpci_rt1711h.c enum typec_cc_status cc) cc 124 drivers/usb/typec/tcpm/tcpci_rt1711h.c switch (cc) { cc 140 drivers/usb/typec/tcpm/tcpci_rt1711h.c if (cc == TYPEC_CC_RD) cc 340 drivers/usb/typec/tcpm/tcpm.c #define tcpm_cc_is_sink(cc) \ cc 341 drivers/usb/typec/tcpm/tcpm.c ((cc) == TYPEC_CC_RP_DEF || (cc) == TYPEC_CC_RP_1_5 || \ cc 342 drivers/usb/typec/tcpm/tcpm.c (cc) == TYPEC_CC_RP_3_0) cc 348 
drivers/usb/typec/tcpm/tcpm.c #define tcpm_cc_is_source(cc) ((cc) == TYPEC_CC_RD) cc 349 drivers/usb/typec/tcpm/tcpm.c #define tcpm_cc_is_audio(cc) ((cc) == TYPEC_CC_RA) cc 350 drivers/usb/typec/tcpm/tcpm.c #define tcpm_cc_is_open(cc) ((cc) == TYPEC_CC_OPEN) cc 711 drivers/usb/typec/tcpm/tcpm.c enum typec_cc_status cc; cc 714 drivers/usb/typec/tcpm/tcpm.c cc = port->polarity ? port->cc2 : port->cc1; cc 715 drivers/usb/typec/tcpm/tcpm.c switch (cc) { cc 2560 drivers/usb/typec/tcpm/tcpm.c static bool tcpm_start_toggling(struct tcpm_port *port, enum typec_cc_status cc) cc 2568 drivers/usb/typec/tcpm/tcpm.c ret = port->tcpc->start_toggling(port->tcpc, port->port_type, cc); cc 2572 drivers/usb/typec/tcpm/tcpm.c static void tcpm_set_cc(struct tcpm_port *port, enum typec_cc_status cc) cc 2574 drivers/usb/typec/tcpm/tcpm.c tcpm_log(port, "cc:=%d", cc); cc 2575 drivers/usb/typec/tcpm/tcpm.c port->cc_req = cc; cc 2576 drivers/usb/typec/tcpm/tcpm.c port->tcpc->set_cc(port->tcpc, cc); cc 2842 drivers/usb/typec/tcpm/tcpm.c static enum typec_pwr_opmode tcpm_get_pwr_opmode(enum typec_cc_status cc) cc 2844 drivers/usb/typec/tcpm/tcpm.c switch (cc) { cc 246 drivers/usb/typec/tcpm/wcove.c static enum typec_cc_status wcove_to_typec_cc(unsigned int cc) cc 248 drivers/usb/typec/tcpm/wcove.c if (cc & UCSC_CC_STATUS_SNK_RP) { cc 249 drivers/usb/typec/tcpm/wcove.c if (cc & UCSC_CC_STATUS_PWRDEFSNK) cc 251 drivers/usb/typec/tcpm/wcove.c else if (cc & UCSC_CC_STATUS_PWR_1P5A_SNK) cc 253 drivers/usb/typec/tcpm/wcove.c else if (cc & UCSC_CC_STATUS_PWR_3A_SNK) cc 256 drivers/usb/typec/tcpm/wcove.c switch (UCSC_CC_STATUS_RX(cc)) { cc 290 drivers/usb/typec/tcpm/wcove.c static int wcove_set_cc(struct tcpc_dev *tcpc, enum typec_cc_status cc) cc 295 drivers/usb/typec/tcpm/wcove.c switch (cc) { cc 421 drivers/usb/typec/tcpm/wcove.c enum typec_cc_status cc) cc 431 drivers/usb/typec/tcpm/wcove.c switch (cc) { cc 970 drivers/video/fbdev/sstfb.c u8 cr0, cc; cc 993 drivers/video/fbdev/sstfb.c cc = dac_i_read(DACREG_CC_I); cc 999 drivers/video/fbdev/sstfb.c (cc & 0x0f) | DACREG_CC_CLKA | DACREG_CC_CLKA_C); cc 1005 drivers/video/fbdev/sstfb.c (cc & 0xf0) | DACREG_CC_CLKB | DACREG_CC_CLKB_D); cc 1908 fs/cifs/smb2pdu.c parse_query_id_ctxt(struct create_context *cc, struct smb2_file_all_info *buf) cc 1910 fs/cifs/smb2pdu.c struct create_on_disk_id *pdisk_id = (struct create_on_disk_id *)cc; cc 1924 fs/cifs/smb2pdu.c struct create_context *cc; cc 1932 fs/cifs/smb2pdu.c cc = (struct create_context *)data_offset; cc 1939 fs/cifs/smb2pdu.c name = le16_to_cpu(cc->NameOffset) + (char *)cc; cc 1940 fs/cifs/smb2pdu.c if (le16_to_cpu(cc->NameLength) == 4 && cc 1942 fs/cifs/smb2pdu.c *oplock = server->ops->parse_lease_buf(cc, epoch, cc 1944 fs/cifs/smb2pdu.c else if (buf && (le16_to_cpu(cc->NameLength) == 4) && cc 1946 fs/cifs/smb2pdu.c parse_query_id_ctxt(cc, buf); cc 1948 fs/cifs/smb2pdu.c next = le32_to_cpu(cc->Next); cc 1952 fs/cifs/smb2pdu.c cc = (struct create_context *)((char *)cc + next); cc 116 fs/fuse/cuse.c struct cuse_conn *cc = NULL, *pos; cc 124 fs/fuse/cuse.c cc = pos; cc 130 fs/fuse/cuse.c if (!cc) cc 137 fs/fuse/cuse.c rc = fuse_do_open(&cc->fc, 0, file, 0); cc 139 fs/fuse/cuse.c fuse_conn_put(&cc->fc); cc 158 fs/fuse/cuse.c struct cuse_conn *cc = fc_to_cc(ff->fc); cc 161 fs/fuse/cuse.c if (cc->unrestricted_ioctl) cc 171 fs/fuse/cuse.c struct cuse_conn *cc = fc_to_cc(ff->fc); cc 174 fs/fuse/cuse.c if (cc->unrestricted_ioctl) cc 321 fs/fuse/cuse.c struct cuse_conn *cc = fc_to_cc(fc), *pos; cc 338 fs/fuse/cuse.c 
cc->unrestricted_ioctl = arg->flags & CUSE_UNRESTRICTED_IOCTL; cc 367 fs/fuse/cuse.c dev_set_drvdata(dev, cc); cc 396 fs/fuse/cuse.c cc->dev = dev; cc 397 fs/fuse/cuse.c cc->cdev = cdev; cc 400 fs/fuse/cuse.c list_add(&cc->list, cuse_conntbl_head(devt)); cc 423 fs/fuse/cuse.c static int cuse_send_init(struct cuse_conn *cc) cc 427 fs/fuse/cuse.c struct fuse_conn *fc = &cc->fc; cc 475 fs/fuse/cuse.c struct cuse_conn *cc = fc_to_cc(fc); cc 476 fs/fuse/cuse.c kfree_rcu(cc, fc.rcu); cc 497 fs/fuse/cuse.c struct cuse_conn *cc; cc 501 fs/fuse/cuse.c cc = kzalloc(sizeof(*cc), GFP_KERNEL); cc 502 fs/fuse/cuse.c if (!cc) cc 509 fs/fuse/cuse.c fuse_conn_init(&cc->fc, file->f_cred->user_ns, &fuse_dev_fiq_ops, NULL); cc 511 fs/fuse/cuse.c fud = fuse_dev_alloc_install(&cc->fc); cc 513 fs/fuse/cuse.c kfree(cc); cc 517 fs/fuse/cuse.c INIT_LIST_HEAD(&cc->list); cc 518 fs/fuse/cuse.c cc->fc.release = cuse_fc_release; cc 520 fs/fuse/cuse.c cc->fc.initialized = 1; cc 521 fs/fuse/cuse.c rc = cuse_send_init(cc); cc 524 fs/fuse/cuse.c fuse_conn_put(&cc->fc); cc 546 fs/fuse/cuse.c struct cuse_conn *cc = fc_to_cc(fud->fc); cc 551 fs/fuse/cuse.c list_del_init(&cc->list); cc 555 fs/fuse/cuse.c if (cc->dev) cc 556 fs/fuse/cuse.c device_unregister(cc->dev); cc 557 fs/fuse/cuse.c if (cc->cdev) { cc 558 fs/fuse/cuse.c unregister_chrdev_region(cc->cdev->dev, 1); cc 559 fs/fuse/cuse.c cdev_del(cc->cdev); cc 562 fs/fuse/cuse.c fuse_conn_put(&cc->fc); cc 581 fs/fuse/cuse.c struct cuse_conn *cc = dev_get_drvdata(dev); cc 583 fs/fuse/cuse.c return sprintf(buf, "%d\n", atomic_read(&cc->fc.num_waiting)); cc 591 fs/fuse/cuse.c struct cuse_conn *cc = dev_get_drvdata(dev); cc 593 fs/fuse/cuse.c fuse_abort_conn(&cc->fc); cc 102 fs/hfsplus/unicode.c static u16 *hfsplus_compose_lookup(u16 *p, u16 cc) cc 108 fs/hfsplus/unicode.c if (!e || cc < p[s * 2] || cc > p[e * 2]) cc 112 fs/hfsplus/unicode.c if (cc > p[i * 2]) cc 114 fs/hfsplus/unicode.c else if (cc < p[i * 2]) cc 129 fs/hfsplus/unicode.c u16 cc, c0, c1; cc 147 fs/hfsplus/unicode.c cc = ce1[0]; cc 149 fs/hfsplus/unicode.c cc = 0; cc 150 fs/hfsplus/unicode.c if (cc) { cc 152 fs/hfsplus/unicode.c if (cc != 0xffff) cc 159 fs/hfsplus/unicode.c cc = (c0 - Hangul_LBase) * Hangul_VCount; cc 160 fs/hfsplus/unicode.c cc = (cc + c1) * Hangul_TCount; cc 161 fs/hfsplus/unicode.c cc += Hangul_SBase; cc 168 fs/hfsplus/unicode.c cc += c1; cc 217 fs/hfsplus/unicode.c cc = ce2[0]; cc 218 fs/hfsplus/unicode.c if (cc) { cc 227 fs/hfsplus/unicode.c cc = 0x2400; cc 230 fs/hfsplus/unicode.c cc = ':'; cc 233 fs/hfsplus/unicode.c cc = c0; cc 236 fs/hfsplus/unicode.c res = nls->uni2char(cc, op, len); cc 500 fs/overlayfs/copy_up.c static int ovl_prep_cu_creds(struct dentry *dentry, struct ovl_cu_creds *cc) cc 504 fs/overlayfs/copy_up.c cc->old = cc->new = NULL; cc 505 fs/overlayfs/copy_up.c err = security_inode_copy_up(dentry, &cc->new); cc 509 fs/overlayfs/copy_up.c if (cc->new) cc 510 fs/overlayfs/copy_up.c cc->old = override_creds(cc->new); cc 515 fs/overlayfs/copy_up.c static void ovl_revert_cu_creds(struct ovl_cu_creds *cc) cc 517 fs/overlayfs/copy_up.c if (cc->new) { cc 518 fs/overlayfs/copy_up.c revert_creds(cc->old); cc 519 fs/overlayfs/copy_up.c put_cred(cc->new); cc 532 fs/overlayfs/copy_up.c struct ovl_cu_creds cc; cc 545 fs/overlayfs/copy_up.c err = ovl_prep_cu_creds(c->dentry, &cc); cc 550 fs/overlayfs/copy_up.c ovl_revert_cu_creds(&cc); cc 598 fs/overlayfs/copy_up.c struct ovl_cu_creds cc; cc 601 fs/overlayfs/copy_up.c err = ovl_prep_cu_creds(c->dentry, &cc); cc 606 fs/overlayfs/copy_up.c 
ovl_revert_cu_creds(&cc); cc 116 fs/ubifs/compress.c err = crypto_comp_compress(compr->cc, in_buf, in_len, out_buf, cc 179 fs/ubifs/compress.c err = crypto_comp_decompress(compr->cc, in_buf, in_len, out_buf, cc 200 fs/ubifs/compress.c compr->cc = crypto_alloc_comp(compr->capi_name, 0, 0); cc 201 fs/ubifs/compress.c if (IS_ERR(compr->cc)) { cc 203 fs/ubifs/compress.c current->pid, compr->name, PTR_ERR(compr->cc)); cc 204 fs/ubifs/compress.c return PTR_ERR(compr->cc); cc 219 fs/ubifs/compress.c crypto_free_comp(compr->cc); cc 841 fs/ubifs/ubifs.h struct crypto_comp *cc; cc 417 include/linux/bcma/bcma.h static inline void bcma_mask32(struct bcma_device *cc, u16 offset, u32 mask) cc 419 include/linux/bcma/bcma.h bcma_write32(cc, offset, bcma_read32(cc, offset) & mask); cc 421 include/linux/bcma/bcma.h static inline void bcma_set32(struct bcma_device *cc, u16 offset, u32 set) cc 423 include/linux/bcma/bcma.h bcma_write32(cc, offset, bcma_read32(cc, offset) | set); cc 425 include/linux/bcma/bcma.h static inline void bcma_maskset32(struct bcma_device *cc, cc 428 include/linux/bcma/bcma.h bcma_write32(cc, offset, (bcma_read32(cc, offset) & mask) | set); cc 430 include/linux/bcma/bcma.h static inline void bcma_mask16(struct bcma_device *cc, u16 offset, u16 mask) cc 432 include/linux/bcma/bcma.h bcma_write16(cc, offset, bcma_read16(cc, offset) & mask); cc 434 include/linux/bcma/bcma.h static inline void bcma_set16(struct bcma_device *cc, u16 offset, u16 set) cc 436 include/linux/bcma/bcma.h bcma_write16(cc, offset, bcma_read16(cc, offset) | set); cc 438 include/linux/bcma/bcma.h static inline void bcma_maskset16(struct bcma_device *cc, cc 441 include/linux/bcma/bcma.h bcma_write16(cc, offset, (bcma_read16(cc, offset) & mask) | set); cc 480 include/linux/bcma/bcma.h extern u32 bcma_chipco_pll_read(struct bcma_drv_cc *cc, u32 offset); cc 658 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_cc_read32(cc, offset) \ cc 659 include/linux/bcma/bcma_driver_chipcommon.h bcma_read32((cc)->core, offset) cc 660 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_cc_write32(cc, offset, val) \ cc 661 include/linux/bcma/bcma_driver_chipcommon.h bcma_write32((cc)->core, offset, val) cc 663 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_cc_mask32(cc, offset, mask) \ cc 664 include/linux/bcma/bcma_driver_chipcommon.h bcma_cc_write32(cc, offset, bcma_cc_read32(cc, offset) & (mask)) cc 665 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_cc_set32(cc, offset, set) \ cc 666 include/linux/bcma/bcma_driver_chipcommon.h bcma_cc_write32(cc, offset, bcma_cc_read32(cc, offset) | (set)) cc 667 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_cc_maskset32(cc, offset, mask, set) \ cc 668 include/linux/bcma/bcma_driver_chipcommon.h bcma_cc_write32(cc, offset, (bcma_cc_read32(cc, offset) & (mask)) | (set)) cc 671 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_pmu_read32(cc, offset) \ cc 672 include/linux/bcma/bcma_driver_chipcommon.h bcma_read32((cc)->pmu.core, offset) cc 673 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_pmu_write32(cc, offset, val) \ cc 674 include/linux/bcma/bcma_driver_chipcommon.h bcma_write32((cc)->pmu.core, offset, val) cc 676 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_pmu_mask32(cc, offset, mask) \ cc 677 include/linux/bcma/bcma_driver_chipcommon.h bcma_pmu_write32(cc, offset, bcma_pmu_read32(cc, offset) & (mask)) cc 678 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_pmu_set32(cc, offset, set) \ cc 679 
include/linux/bcma/bcma_driver_chipcommon.h bcma_pmu_write32(cc, offset, bcma_pmu_read32(cc, offset) | (set)) cc 680 include/linux/bcma/bcma_driver_chipcommon.h #define bcma_pmu_maskset32(cc, offset, mask, set) \ cc 681 include/linux/bcma/bcma_driver_chipcommon.h bcma_pmu_write32(cc, offset, (bcma_pmu_read32(cc, offset) & (mask)) | (set)) cc 683 include/linux/bcma/bcma_driver_chipcommon.h extern u32 bcma_chipco_watchdog_timer_set(struct bcma_drv_cc *cc, u32 ticks); cc 685 include/linux/bcma/bcma_driver_chipcommon.h extern u32 bcma_chipco_get_alp_clock(struct bcma_drv_cc *cc); cc 687 include/linux/bcma/bcma_driver_chipcommon.h void bcma_chipco_irq_mask(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 689 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_irq_status(struct bcma_drv_cc *cc, u32 mask); cc 692 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_in(struct bcma_drv_cc *cc, u32 mask); cc 693 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_out(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 694 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_outen(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 695 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_control(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 696 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_intmask(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 697 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_polarity(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 698 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_pullup(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 699 include/linux/bcma/bcma_driver_chipcommon.h u32 bcma_chipco_gpio_pulldown(struct bcma_drv_cc *cc, u32 mask, u32 value); cc 702 include/linux/bcma/bcma_driver_chipcommon.h extern void bcma_chipco_pll_write(struct bcma_drv_cc *cc, u32 offset, cc 704 include/linux/bcma/bcma_driver_chipcommon.h extern void bcma_chipco_pll_maskset(struct bcma_drv_cc *cc, u32 offset, cc 706 include/linux/bcma/bcma_driver_chipcommon.h extern void bcma_chipco_chipctl_maskset(struct bcma_drv_cc *cc, cc 708 include/linux/bcma/bcma_driver_chipcommon.h extern void bcma_chipco_regctl_maskset(struct bcma_drv_cc *cc, cc 710 include/linux/bcma/bcma_driver_chipcommon.h extern void bcma_pmu_spuravoid_pllupdate(struct bcma_drv_cc *cc, int spuravoid); cc 712 include/linux/bcma/bcma_driver_chipcommon.h extern u32 bcma_pmu_get_bus_clock(struct bcma_drv_cc *cc); cc 30 include/linux/mlx5/eq.h struct mlx5_eqe *mlx5_eq_get_eqe(struct mlx5_eq *eq, u32 cc); cc 31 include/linux/mlx5/eq.h void mlx5_eq_update_ci(struct mlx5_eq *eq, u32 cc, bool arm); cc 41 include/linux/mlx5/eq.h static inline u32 mlx5_eq_update_cc(struct mlx5_eq *eq, u32 cc) cc 43 include/linux/mlx5/eq.h if (unlikely(cc >= MLX5_NUM_SPARE_EQE)) { cc 44 include/linux/mlx5/eq.h mlx5_eq_update_ci(eq, cc, 0); cc 45 include/linux/mlx5/eq.h cc = 0; cc 47 include/linux/mlx5/eq.h return cc; cc 3527 include/linux/mlx5/mlx5_ifc.h u8 cc[0x1]; cc 1210 include/linux/of.h #define of_for_each_phandle(it, err, np, ln, cn, cc) \ cc 1211 include/linux/of.h for (of_phandle_iterator_init((it), (np), (ln), (cn), (cc)), \ cc 114 include/linux/platform_data/brcmfmac.h char cc[BRCMFMAC_COUNTRY_BUF_SZ]; cc 599 include/linux/ssb/ssb_driver_chipcommon.h static inline bool ssb_chipco_available(struct ssb_chipcommon *cc) cc 601 include/linux/ssb/ssb_driver_chipcommon.h return (cc->dev != NULL); cc 605 include/linux/ssb/ssb_driver_chipcommon.h 
#define chipco_read32(cc, offset) ssb_read32((cc)->dev, offset) cc 606 include/linux/ssb/ssb_driver_chipcommon.h #define chipco_write32(cc, offset, val) ssb_write32((cc)->dev, offset, val) cc 608 include/linux/ssb/ssb_driver_chipcommon.h #define chipco_mask32(cc, offset, mask) \ cc 609 include/linux/ssb/ssb_driver_chipcommon.h chipco_write32(cc, offset, chipco_read32(cc, offset) & (mask)) cc 610 include/linux/ssb/ssb_driver_chipcommon.h #define chipco_set32(cc, offset, set) \ cc 611 include/linux/ssb/ssb_driver_chipcommon.h chipco_write32(cc, offset, chipco_read32(cc, offset) | (set)) cc 612 include/linux/ssb/ssb_driver_chipcommon.h #define chipco_maskset32(cc, offset, mask, set) \ cc 613 include/linux/ssb/ssb_driver_chipcommon.h chipco_write32(cc, offset, (chipco_read32(cc, offset) & (mask)) | (set)) cc 615 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipcommon_init(struct ssb_chipcommon *cc); cc 617 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipco_suspend(struct ssb_chipcommon *cc); cc 618 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipco_resume(struct ssb_chipcommon *cc); cc 620 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipco_get_clockcpu(struct ssb_chipcommon *cc, cc 622 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipco_get_clockcontrol(struct ssb_chipcommon *cc, cc 624 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipco_timing_init(struct ssb_chipcommon *cc, cc 633 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_chipco_set_clockmode(struct ssb_chipcommon *cc, cc 636 include/linux/ssb/ssb_driver_chipcommon.h extern u32 ssb_chipco_watchdog_timer_set(struct ssb_chipcommon *cc, u32 ticks); cc 638 include/linux/ssb/ssb_driver_chipcommon.h void ssb_chipco_irq_mask(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 640 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_irq_status(struct ssb_chipcommon *cc, u32 mask); cc 643 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_in(struct ssb_chipcommon *cc, u32 mask); cc 644 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_out(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 645 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_outen(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 646 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_control(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 647 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_intmask(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 648 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_polarity(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 649 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_pullup(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 650 include/linux/ssb/ssb_driver_chipcommon.h u32 ssb_chipco_gpio_pulldown(struct ssb_chipcommon *cc, u32 mask, u32 value); cc 653 include/linux/ssb/ssb_driver_chipcommon.h extern int ssb_chipco_serial_init(struct ssb_chipcommon *cc, cc 658 include/linux/ssb/ssb_driver_chipcommon.h extern void ssb_pmu_init(struct ssb_chipcommon *cc); cc 667 include/linux/ssb/ssb_driver_chipcommon.h void ssb_pmu_set_ldo_voltage(struct ssb_chipcommon *cc, cc 669 include/linux/ssb/ssb_driver_chipcommon.h void ssb_pmu_set_ldo_paref(struct ssb_chipcommon *cc, bool on); cc 670 include/linux/ssb/ssb_driver_chipcommon.h void ssb_pmu_spuravoid_pllupdate(struct ssb_chipcommon *cc, int spuravoid); cc 31 include/linux/timecounter.h u64 (*read)(const struct 
cyclecounter *cc); cc 56 include/linux/timecounter.h const struct cyclecounter *cc; cc 70 include/linux/timecounter.h static inline u64 cyclecounter_cyc2ns(const struct cyclecounter *cc, cc 75 include/linux/timecounter.h ns = (ns * cc->mult) + *frac; cc 77 include/linux/timecounter.h return ns >> cc->shift; cc 100 include/linux/timecounter.h const struct cyclecounter *cc, cc 130 include/linux/usb/tcpm.h int (*set_cc)(struct tcpc_dev *dev, enum typec_cc_status cc); cc 143 include/linux/usb/tcpm.h enum typec_cc_status cc); cc 155 include/media/dvb_demux.h int cc; cc 532 include/sound/hdaudio.h struct cyclecounter cc; cc 9 kernel/time/timecounter.c const struct cyclecounter *cc, cc 12 kernel/time/timecounter.c tc->cc = cc; cc 13 kernel/time/timecounter.c tc->cycle_last = cc->read(cc); cc 15 kernel/time/timecounter.c tc->mask = (1ULL << cc->shift) - 1; cc 37 kernel/time/timecounter.c cycle_now = tc->cc->read(tc->cc); cc 40 kernel/time/timecounter.c cycle_delta = (cycle_now - tc->cycle_last) & tc->cc->mask; cc 43 kernel/time/timecounter.c ns_offset = cyclecounter_cyc2ns(tc->cc, cycle_delta, cc 69 kernel/time/timecounter.c static u64 cc_cyc2ns_backwards(const struct cyclecounter *cc, cc 74 kernel/time/timecounter.c ns = ((ns * cc->mult) - frac) >> cc->shift; cc 82 kernel/time/timecounter.c u64 delta = (cycle_tstamp - tc->cycle_last) & tc->cc->mask; cc 90 kernel/time/timecounter.c if (delta > tc->cc->mask / 2) { cc 91 kernel/time/timecounter.c delta = (tc->cycle_last - cycle_tstamp) & tc->cc->mask; cc 92 kernel/time/timecounter.c nsec -= cc_cyc2ns_backwards(tc->cc, delta, tc->mask, frac); cc 94 kernel/time/timecounter.c nsec += cyclecounter_cyc2ns(tc->cc, delta, tc->mask, &frac); cc 109 lib/fonts/fonts.c int i, c, cc, res; cc 113 lib/fonts/fonts.c cc = -10000; cc 139 lib/fonts/fonts.c if (c > cc) { cc 140 lib/fonts/fonts.c cc = c; cc 205 mm/compaction.c static inline bool isolation_suitable(struct compact_control *cc, cc 208 mm/compaction.c if (cc->ignore_skip_hint) cc 388 mm/compaction.c static bool test_and_set_skip(struct compact_control *cc, struct page *page, cc 394 mm/compaction.c if (cc->ignore_skip_hint) cc 401 mm/compaction.c if (!skip && !cc->no_set_skip_hint) cc 407 mm/compaction.c static void update_cached_migrate(struct compact_control *cc, unsigned long pfn) cc 409 mm/compaction.c struct zone *zone = cc->zone; cc 414 mm/compaction.c if (cc->no_set_skip_hint) cc 419 mm/compaction.c if (cc->mode != MIGRATE_ASYNC && cc 428 mm/compaction.c static void update_pageblock_skip(struct compact_control *cc, cc 431 mm/compaction.c struct zone *zone = cc->zone; cc 433 mm/compaction.c if (cc->no_set_skip_hint) cc 446 mm/compaction.c static inline bool isolation_suitable(struct compact_control *cc, cc 457 mm/compaction.c static inline void update_pageblock_skip(struct compact_control *cc, cc 462 mm/compaction.c static void update_cached_migrate(struct compact_control *cc, unsigned long pfn) cc 466 mm/compaction.c static bool test_and_set_skip(struct compact_control *cc, struct page *page, cc 483 mm/compaction.c struct compact_control *cc) cc 486 mm/compaction.c if (cc->mode == MIGRATE_ASYNC && !cc->contended) { cc 490 mm/compaction.c cc->contended = true; cc 513 mm/compaction.c unsigned long flags, bool *locked, struct compact_control *cc) cc 521 mm/compaction.c cc->contended = true; cc 535 mm/compaction.c static unsigned long isolate_freepages_block(struct compact_control *cc, cc 566 mm/compaction.c && compact_unlock_should_abort(&cc->zone->lock, flags, cc 567 mm/compaction.c &locked, cc)) cc 601 
mm/compaction.c locked = compact_lock_irqsave(&cc->zone->lock, cc 602 mm/compaction.c &flags, cc); cc 617 mm/compaction.c cc->nr_freepages += isolated; cc 620 mm/compaction.c if (!strict && cc->nr_migratepages <= cc->nr_freepages) { cc 638 mm/compaction.c spin_unlock_irqrestore(&cc->zone->lock, flags); cc 661 mm/compaction.c cc->total_free_scanned += nr_scanned; cc 682 mm/compaction.c isolate_freepages_range(struct compact_control *cc, cc 690 mm/compaction.c if (block_start_pfn < cc->zone->zone_start_pfn) cc 691 mm/compaction.c block_start_pfn = cc->zone->zone_start_pfn; cc 714 mm/compaction.c block_end_pfn, cc->zone)) cc 717 mm/compaction.c isolated = isolate_freepages_block(cc, &isolate_start_pfn, cc 782 mm/compaction.c isolate_migratepages_block(struct compact_control *cc, unsigned long low_pfn, cc 785 mm/compaction.c pg_data_t *pgdat = cc->zone->zone_pgdat; cc 803 mm/compaction.c if (cc->mode == MIGRATE_ASYNC) cc 814 mm/compaction.c if (cc->direct_compaction && (cc->mode == MIGRATE_ASYNC)) { cc 816 mm/compaction.c next_skip_pfn = block_end_pfn(low_pfn, cc->order); cc 841 mm/compaction.c next_skip_pfn = block_end_pfn(low_pfn, cc->order); cc 851 mm/compaction.c flags, &locked, cc)) { cc 869 mm/compaction.c if (!cc->ignore_skip_hint && get_pageblock_skip(page)) { cc 948 mm/compaction.c if (!(cc->gfp_mask & __GFP_FS) && page_mapping(page)) cc 954 mm/compaction.c &flags, cc); cc 959 mm/compaction.c if (test_and_set_skip(cc, page, low_pfn)) cc 992 mm/compaction.c list_add(&page->lru, &cc->migratepages); cc 993 mm/compaction.c cc->nr_migratepages++; cc 1002 mm/compaction.c if (cc->nr_migratepages == COMPACT_CLUSTER_MAX && cc 1003 mm/compaction.c !cc->rescan && !cc->contended) { cc 1023 mm/compaction.c putback_movable_pages(&cc->migratepages); cc 1024 mm/compaction.c cc->nr_migratepages = 0; cc 1034 mm/compaction.c next_skip_pfn += 1UL << cc->order; cc 1057 mm/compaction.c if (low_pfn == end_pfn && (!nr_isolated || cc->rescan)) { cc 1060 mm/compaction.c update_cached_migrate(cc, low_pfn); cc 1067 mm/compaction.c cc->total_migrate_scanned += nr_scanned; cc 1085 mm/compaction.c isolate_migratepages_range(struct compact_control *cc, unsigned long start_pfn, cc 1093 mm/compaction.c if (block_start_pfn < cc->zone->zone_start_pfn) cc 1094 mm/compaction.c block_start_pfn = cc->zone->zone_start_pfn; cc 1104 mm/compaction.c block_end_pfn, cc->zone)) cc 1107 mm/compaction.c pfn = isolate_migratepages_block(cc, pfn, block_end_pfn, cc 1113 mm/compaction.c if (cc->nr_migratepages == COMPACT_CLUSTER_MAX) cc 1123 mm/compaction.c static bool suitable_migration_source(struct compact_control *cc, cc 1131 mm/compaction.c if ((cc->mode != MIGRATE_ASYNC) || !cc->direct_compaction) cc 1136 mm/compaction.c if (cc->migratetype == MIGRATE_MOVABLE) cc 1139 mm/compaction.c return block_mt == cc->migratetype; cc 1143 mm/compaction.c static bool suitable_migration_target(struct compact_control *cc, cc 1157 mm/compaction.c if (cc->ignore_block_suitable) cc 1169 mm/compaction.c freelist_scan_limit(struct compact_control *cc) cc 1173 mm/compaction.c return (COMPACT_CLUSTER_MAX >> min(shift, cc->fast_search_fail)) + 1; cc 1180 mm/compaction.c static inline bool compact_scanners_met(struct compact_control *cc) cc 1182 mm/compaction.c return (cc->free_pfn >> pageblock_order) cc 1183 mm/compaction.c <= (cc->migrate_pfn >> pageblock_order); cc 1222 mm/compaction.c fast_isolate_around(struct compact_control *cc, unsigned long pfn, unsigned long nr_isolated) cc 1228 mm/compaction.c if (cc->nr_freepages >= cc->nr_migratepages) cc 
1232 mm/compaction.c if (cc->direct_compaction && cc->mode == MIGRATE_ASYNC) cc 1237 mm/compaction.c end_pfn = min(pageblock_end_pfn(pfn), zone_end_pfn(cc->zone)) - 1; cc 1241 mm/compaction.c isolate_freepages_block(cc, &start_pfn, pfn, &cc->freepages, 1, false); cc 1242 mm/compaction.c if (cc->nr_freepages >= cc->nr_migratepages) cc 1249 mm/compaction.c isolate_freepages_block(cc, &start_pfn, end_pfn, &cc->freepages, 1, false); cc 1252 mm/compaction.c if (cc->nr_freepages < cc->nr_migratepages) cc 1257 mm/compaction.c static int next_search_order(struct compact_control *cc, int order) cc 1261 mm/compaction.c order = cc->order - 1; cc 1264 mm/compaction.c if (order == cc->search_order) { cc 1265 mm/compaction.c cc->search_order--; cc 1266 mm/compaction.c if (cc->search_order < 0) cc 1267 mm/compaction.c cc->search_order = cc->order - 1; cc 1275 mm/compaction.c fast_isolate_freepages(struct compact_control *cc) cc 1277 mm/compaction.c unsigned int limit = min(1U, freelist_scan_limit(cc) >> 1); cc 1287 mm/compaction.c if (cc->order <= 0) cc 1288 mm/compaction.c return cc->free_pfn; cc 1294 mm/compaction.c if (cc->free_pfn >= cc->zone->compact_init_free_pfn) { cc 1303 mm/compaction.c distance = (cc->free_pfn - cc->migrate_pfn); cc 1304 mm/compaction.c low_pfn = pageblock_start_pfn(cc->free_pfn - (distance >> 2)); cc 1305 mm/compaction.c min_pfn = pageblock_start_pfn(cc->free_pfn - (distance >> 1)); cc 1314 mm/compaction.c cc->search_order = min_t(unsigned int, cc->order - 1, cc->search_order); cc 1316 mm/compaction.c for (order = cc->search_order; cc 1318 mm/compaction.c order = next_search_order(cc, order)) { cc 1319 mm/compaction.c struct free_area *area = &cc->zone->free_area[order]; cc 1328 mm/compaction.c spin_lock_irqsave(&cc->zone->lock, flags); cc 1341 mm/compaction.c cc->fast_search_fail = 0; cc 1342 mm/compaction.c cc->search_order = order; cc 1374 mm/compaction.c cc->nr_freepages += nr_isolated; cc 1375 mm/compaction.c list_add_tail(&page->lru, &cc->freepages); cc 1379 mm/compaction.c order = cc->search_order + 1; cc 1384 mm/compaction.c spin_unlock_irqrestore(&cc->zone->lock, flags); cc 1395 mm/compaction.c cc->fast_search_fail++; cc 1404 mm/compaction.c cc->free_pfn = highest; cc 1406 mm/compaction.c if (cc->direct_compaction && pfn_valid(min_pfn)) { cc 1408 mm/compaction.c cc->free_pfn = min_pfn; cc 1414 mm/compaction.c if (highest && highest >= cc->zone->compact_cached_free_pfn) { cc 1416 mm/compaction.c cc->zone->compact_cached_free_pfn = highest; cc 1419 mm/compaction.c cc->total_free_scanned += nr_scanned; cc 1421 mm/compaction.c return cc->free_pfn; cc 1424 mm/compaction.c fast_isolate_around(cc, low_pfn, nr_isolated); cc 1432 mm/compaction.c static void isolate_freepages(struct compact_control *cc) cc 1434 mm/compaction.c struct zone *zone = cc->zone; cc 1440 mm/compaction.c struct list_head *freelist = &cc->freepages; cc 1444 mm/compaction.c isolate_start_pfn = fast_isolate_freepages(cc); cc 1445 mm/compaction.c if (cc->nr_freepages) cc 1459 mm/compaction.c isolate_start_pfn = cc->free_pfn; cc 1463 mm/compaction.c low_pfn = pageblock_end_pfn(cc->migrate_pfn); cc 1464 mm/compaction.c stride = cc->mode == MIGRATE_ASYNC ? 
COMPACT_CLUSTER_MAX : 1; cc 1490 mm/compaction.c if (!suitable_migration_target(cc, page)) cc 1494 mm/compaction.c if (!isolation_suitable(cc, page)) cc 1498 mm/compaction.c nr_isolated = isolate_freepages_block(cc, &isolate_start_pfn, cc 1503 mm/compaction.c update_pageblock_skip(cc, page, block_start_pfn); cc 1506 mm/compaction.c if (cc->nr_freepages >= cc->nr_migratepages) { cc 1538 mm/compaction.c cc->free_pfn = isolate_start_pfn; cc 1552 mm/compaction.c struct compact_control *cc = (struct compact_control *)data; cc 1555 mm/compaction.c if (list_empty(&cc->freepages)) { cc 1556 mm/compaction.c isolate_freepages(cc); cc 1558 mm/compaction.c if (list_empty(&cc->freepages)) cc 1562 mm/compaction.c freepage = list_entry(cc->freepages.next, struct page, lru); cc 1564 mm/compaction.c cc->nr_freepages--; cc 1576 mm/compaction.c struct compact_control *cc = (struct compact_control *)data; cc 1578 mm/compaction.c list_add(&page->lru, &cc->freepages); cc 1579 mm/compaction.c cc->nr_freepages++; cc 1596 mm/compaction.c update_fast_start_pfn(struct compact_control *cc, unsigned long pfn) cc 1598 mm/compaction.c if (cc->fast_start_pfn == ULONG_MAX) cc 1601 mm/compaction.c if (!cc->fast_start_pfn) cc 1602 mm/compaction.c cc->fast_start_pfn = pfn; cc 1604 mm/compaction.c cc->fast_start_pfn = min(cc->fast_start_pfn, pfn); cc 1608 mm/compaction.c reinit_migrate_pfn(struct compact_control *cc) cc 1610 mm/compaction.c if (!cc->fast_start_pfn || cc->fast_start_pfn == ULONG_MAX) cc 1611 mm/compaction.c return cc->migrate_pfn; cc 1613 mm/compaction.c cc->migrate_pfn = cc->fast_start_pfn; cc 1614 mm/compaction.c cc->fast_start_pfn = ULONG_MAX; cc 1616 mm/compaction.c return cc->migrate_pfn; cc 1624 mm/compaction.c static unsigned long fast_find_migrateblock(struct compact_control *cc) cc 1626 mm/compaction.c unsigned int limit = freelist_scan_limit(cc); cc 1629 mm/compaction.c unsigned long pfn = cc->migrate_pfn; cc 1634 mm/compaction.c if (cc->ignore_skip_hint) cc 1642 mm/compaction.c if (pfn != cc->zone->zone_start_pfn && pfn != pageblock_start_pfn(pfn)) cc 1650 mm/compaction.c if (cc->order <= PAGE_ALLOC_COSTLY_ORDER) cc 1659 mm/compaction.c if (cc->direct_compaction && cc->migratetype != MIGRATE_MOVABLE) cc 1668 mm/compaction.c distance = (cc->free_pfn - cc->migrate_pfn) >> 1; cc 1669 mm/compaction.c if (cc->migrate_pfn != cc->zone->zone_start_pfn) cc 1671 mm/compaction.c high_pfn = pageblock_start_pfn(cc->migrate_pfn + distance); cc 1673 mm/compaction.c for (order = cc->order - 1; cc 1674 mm/compaction.c order >= PAGE_ALLOC_COSTLY_ORDER && pfn == cc->migrate_pfn && nr_scanned < limit; cc 1676 mm/compaction.c struct free_area *area = &cc->zone->free_area[order]; cc 1684 mm/compaction.c spin_lock_irqsave(&cc->zone->lock, flags); cc 1708 mm/compaction.c update_fast_start_pfn(cc, free_pfn); cc 1710 mm/compaction.c cc->fast_search_fail = 0; cc 1716 mm/compaction.c cc->fast_search_fail++; cc 1721 mm/compaction.c spin_unlock_irqrestore(&cc->zone->lock, flags); cc 1724 mm/compaction.c cc->total_migrate_scanned += nr_scanned; cc 1730 mm/compaction.c if (pfn == cc->migrate_pfn) cc 1731 mm/compaction.c pfn = reinit_migrate_pfn(cc); cc 1741 mm/compaction.c static isolate_migrate_t isolate_migratepages(struct compact_control *cc) cc 1749 mm/compaction.c (cc->mode != MIGRATE_SYNC ? 
ISOLATE_ASYNC_MIGRATE : 0); cc 1757 mm/compaction.c low_pfn = fast_find_migrateblock(cc); cc 1759 mm/compaction.c if (block_start_pfn < cc->zone->zone_start_pfn) cc 1760 mm/compaction.c block_start_pfn = cc->zone->zone_start_pfn; cc 1767 mm/compaction.c fast_find_block = low_pfn != cc->migrate_pfn && !cc->fast_search_fail; cc 1776 mm/compaction.c for (; block_end_pfn <= cc->free_pfn; cc 1791 mm/compaction.c block_end_pfn, cc->zone); cc 1803 mm/compaction.c !fast_find_block && !isolation_suitable(cc, page)) cc 1814 mm/compaction.c if (!suitable_migration_source(cc, page)) { cc 1815 mm/compaction.c update_cached_migrate(cc, block_end_pfn); cc 1820 mm/compaction.c low_pfn = isolate_migratepages_block(cc, low_pfn, cc 1835 mm/compaction.c cc->migrate_pfn = low_pfn; cc 1837 mm/compaction.c return cc->nr_migratepages ? ISOLATE_SUCCESS : ISOLATE_NONE; cc 1849 mm/compaction.c static enum compact_result __compact_finished(struct compact_control *cc) cc 1852 mm/compaction.c const int migratetype = cc->migratetype; cc 1856 mm/compaction.c if (compact_scanners_met(cc)) { cc 1858 mm/compaction.c reset_cached_positions(cc->zone); cc 1866 mm/compaction.c if (cc->direct_compaction) cc 1867 mm/compaction.c cc->zone->compact_blockskip_flush = true; cc 1869 mm/compaction.c if (cc->whole_zone) cc 1875 mm/compaction.c if (is_via_compact_memory(cc->order)) cc 1884 mm/compaction.c if (!IS_ALIGNED(cc->migrate_pfn, pageblock_nr_pages)) cc 1889 mm/compaction.c for (order = cc->order; order < MAX_ORDER; order++) { cc 1890 mm/compaction.c struct free_area *area = &cc->zone->free_area[order]; cc 1922 mm/compaction.c if (cc->mode == MIGRATE_ASYNC || cc 1923 mm/compaction.c IS_ALIGNED(cc->migrate_pfn, cc 1933 mm/compaction.c if (cc->contended || fatal_signal_pending(current)) cc 1939 mm/compaction.c static enum compact_result compact_finished(struct compact_control *cc) cc 1943 mm/compaction.c ret = __compact_finished(cc); cc 1944 mm/compaction.c trace_mm_compaction_finished(cc->zone, cc->order, ret); cc 2072 mm/compaction.c compact_zone(struct compact_control *cc, struct capture_control *capc) cc 2075 mm/compaction.c unsigned long start_pfn = cc->zone->zone_start_pfn; cc 2076 mm/compaction.c unsigned long end_pfn = zone_end_pfn(cc->zone); cc 2078 mm/compaction.c const bool sync = cc->mode != MIGRATE_ASYNC; cc 2085 mm/compaction.c cc->total_migrate_scanned = 0; cc 2086 mm/compaction.c cc->total_free_scanned = 0; cc 2087 mm/compaction.c cc->nr_migratepages = 0; cc 2088 mm/compaction.c cc->nr_freepages = 0; cc 2089 mm/compaction.c INIT_LIST_HEAD(&cc->freepages); cc 2090 mm/compaction.c INIT_LIST_HEAD(&cc->migratepages); cc 2092 mm/compaction.c cc->migratetype = gfpflags_to_migratetype(cc->gfp_mask); cc 2093 mm/compaction.c ret = compaction_suitable(cc->zone, cc->order, cc->alloc_flags, cc 2094 mm/compaction.c cc->classzone_idx); cc 2106 mm/compaction.c if (compaction_restarting(cc->zone, cc->order)) cc 2107 mm/compaction.c __reset_isolation_suitable(cc->zone); cc 2115 mm/compaction.c cc->fast_start_pfn = 0; cc 2116 mm/compaction.c if (cc->whole_zone) { cc 2117 mm/compaction.c cc->migrate_pfn = start_pfn; cc 2118 mm/compaction.c cc->free_pfn = pageblock_start_pfn(end_pfn - 1); cc 2120 mm/compaction.c cc->migrate_pfn = cc->zone->compact_cached_migrate_pfn[sync]; cc 2121 mm/compaction.c cc->free_pfn = cc->zone->compact_cached_free_pfn; cc 2122 mm/compaction.c if (cc->free_pfn < start_pfn || cc->free_pfn >= end_pfn) { cc 2123 mm/compaction.c cc->free_pfn = pageblock_start_pfn(end_pfn - 1); cc 2124 mm/compaction.c 
cc->zone->compact_cached_free_pfn = cc->free_pfn; cc 2126 mm/compaction.c if (cc->migrate_pfn < start_pfn || cc->migrate_pfn >= end_pfn) { cc 2127 mm/compaction.c cc->migrate_pfn = start_pfn; cc 2128 mm/compaction.c cc->zone->compact_cached_migrate_pfn[0] = cc->migrate_pfn; cc 2129 mm/compaction.c cc->zone->compact_cached_migrate_pfn[1] = cc->migrate_pfn; cc 2132 mm/compaction.c if (cc->migrate_pfn <= cc->zone->compact_init_migrate_pfn) cc 2133 mm/compaction.c cc->whole_zone = true; cc 2147 mm/compaction.c cc->zone->compact_cached_migrate_pfn[0] == cc->zone->compact_cached_migrate_pfn[1]; cc 2149 mm/compaction.c trace_mm_compaction_begin(start_pfn, cc->migrate_pfn, cc 2150 mm/compaction.c cc->free_pfn, end_pfn, sync); cc 2154 mm/compaction.c while ((ret = compact_finished(cc)) == COMPACT_CONTINUE) { cc 2156 mm/compaction.c unsigned long start_pfn = cc->migrate_pfn; cc 2166 mm/compaction.c cc->rescan = false; cc 2169 mm/compaction.c cc->rescan = true; cc 2172 mm/compaction.c switch (isolate_migratepages(cc)) { cc 2175 mm/compaction.c putback_movable_pages(&cc->migratepages); cc 2176 mm/compaction.c cc->nr_migratepages = 0; cc 2181 mm/compaction.c cc->zone->compact_cached_migrate_pfn[1] = cc 2182 mm/compaction.c cc->zone->compact_cached_migrate_pfn[0]; cc 2197 mm/compaction.c err = migrate_pages(&cc->migratepages, compaction_alloc, cc 2198 mm/compaction.c compaction_free, (unsigned long)cc, cc->mode, cc 2201 mm/compaction.c trace_mm_compaction_migratepages(cc->nr_migratepages, err, cc 2202 mm/compaction.c &cc->migratepages); cc 2205 mm/compaction.c cc->nr_migratepages = 0; cc 2207 mm/compaction.c putback_movable_pages(&cc->migratepages); cc 2212 mm/compaction.c if (err == -ENOMEM && !compact_scanners_met(cc)) { cc 2220 mm/compaction.c if (cc->direct_compaction && cc 2221 mm/compaction.c (cc->mode == MIGRATE_ASYNC)) { cc 2222 mm/compaction.c cc->migrate_pfn = block_end_pfn( cc 2223 mm/compaction.c cc->migrate_pfn - 1, cc->order); cc 2237 mm/compaction.c if (cc->order > 0 && last_migrated_pfn) { cc 2240 mm/compaction.c block_start_pfn(cc->migrate_pfn, cc->order); cc 2245 mm/compaction.c drain_local_pages(cc->zone); cc 2264 mm/compaction.c if (cc->nr_freepages > 0) { cc 2265 mm/compaction.c unsigned long free_pfn = release_freepages(&cc->freepages); cc 2267 mm/compaction.c cc->nr_freepages = 0; cc 2275 mm/compaction.c if (free_pfn > cc->zone->compact_cached_free_pfn) cc 2276 mm/compaction.c cc->zone->compact_cached_free_pfn = free_pfn; cc 2279 mm/compaction.c count_compact_events(COMPACTMIGRATE_SCANNED, cc->total_migrate_scanned); cc 2280 mm/compaction.c count_compact_events(COMPACTFREE_SCANNED, cc->total_free_scanned); cc 2282 mm/compaction.c trace_mm_compaction_end(start_pfn, cc->migrate_pfn, cc 2283 mm/compaction.c cc->free_pfn, end_pfn, sync, ret); cc 2294 mm/compaction.c struct compact_control cc = { cc 2309 mm/compaction.c .cc = &cc, cc 2316 mm/compaction.c ret = compact_zone(&cc, &capc); cc 2318 mm/compaction.c VM_BUG_ON(!list_empty(&cc.freepages)); cc 2319 mm/compaction.c VM_BUG_ON(!list_empty(&cc.migratepages)); cc 2414 mm/compaction.c struct compact_control cc = { cc 2429 mm/compaction.c cc.zone = zone; cc 2431 mm/compaction.c compact_zone(&cc, NULL); cc 2433 mm/compaction.c VM_BUG_ON(!list_empty(&cc.freepages)); cc 2434 mm/compaction.c VM_BUG_ON(!list_empty(&cc.migratepages)); cc 2528 mm/compaction.c struct compact_control cc = { cc 2536 mm/compaction.c trace_mm_compaction_kcompactd_wake(pgdat->node_id, cc.order, cc 2537 mm/compaction.c cc.classzone_idx); cc 2540 mm/compaction.c for 
(zoneid = 0; zoneid <= cc.classzone_idx; zoneid++) { cc 2547 mm/compaction.c if (compaction_deferred(zone, cc.order)) cc 2550 mm/compaction.c if (compaction_suitable(zone, cc.order, 0, zoneid) != cc 2557 mm/compaction.c cc.zone = zone; cc 2558 mm/compaction.c status = compact_zone(&cc, NULL); cc 2561 mm/compaction.c compaction_defer_reset(zone, cc.order, false); cc 2575 mm/compaction.c defer_compaction(zone, cc.order); cc 2579 mm/compaction.c cc.total_migrate_scanned); cc 2581 mm/compaction.c cc.total_free_scanned); cc 2583 mm/compaction.c VM_BUG_ON(!list_empty(&cc.freepages)); cc 2584 mm/compaction.c VM_BUG_ON(!list_empty(&cc.migratepages)); cc 2592 mm/compaction.c if (pgdat->kcompactd_max_order <= cc.order) cc 2594 mm/compaction.c if (pgdat->kcompactd_classzone_idx >= cc.classzone_idx) cc 213 mm/internal.h struct compact_control *cc; cc 218 mm/internal.h isolate_freepages_range(struct compact_control *cc, cc 221 mm/internal.h isolate_migratepages_range(struct compact_control *cc, cc 830 mm/page_alloc.c capc->cc->zone == zone && cc 831 mm/page_alloc.c capc->cc->direct_compaction ? capc : NULL; cc 838 mm/page_alloc.c if (!capc || order != capc->cc->order) cc 8318 mm/page_alloc.c static int __alloc_contig_migrate_range(struct compact_control *cc, cc 8329 mm/page_alloc.c while (pfn < end || !list_empty(&cc->migratepages)) { cc 8335 mm/page_alloc.c if (list_empty(&cc->migratepages)) { cc 8336 mm/page_alloc.c cc->nr_migratepages = 0; cc 8337 mm/page_alloc.c pfn = isolate_migratepages_range(cc, pfn, end); cc 8348 mm/page_alloc.c nr_reclaimed = reclaim_clean_pages_from_list(cc->zone, cc 8349 mm/page_alloc.c &cc->migratepages); cc 8350 mm/page_alloc.c cc->nr_migratepages -= nr_reclaimed; cc 8352 mm/page_alloc.c ret = migrate_pages(&cc->migratepages, alloc_migrate_target, cc 8353 mm/page_alloc.c NULL, 0, cc->mode, MR_CONTIG_RANGE); cc 8356 mm/page_alloc.c putback_movable_pages(&cc->migratepages); cc 8390 mm/page_alloc.c struct compact_control cc = { cc 8399 mm/page_alloc.c INIT_LIST_HEAD(&cc.migratepages); cc 8440 mm/page_alloc.c ret = __alloc_contig_migrate_range(&cc, start, end); cc 8496 mm/page_alloc.c outer_end = isolate_freepages_range(&cc, outer_start, end); cc 1696 mm/zsmalloc.c struct zs_compact_control *cc) cc 1700 mm/zsmalloc.c struct page *s_page = cc->s_page; cc 1701 mm/zsmalloc.c struct page *d_page = cc->d_page; cc 1702 mm/zsmalloc.c int obj_idx = cc->obj_idx; cc 1739 mm/zsmalloc.c cc->s_page = s_page; cc 1740 mm/zsmalloc.c cc->obj_idx = obj_idx; cc 2267 mm/zsmalloc.c struct zs_compact_control cc; cc 2277 mm/zsmalloc.c cc.obj_idx = 0; cc 2278 mm/zsmalloc.c cc.s_page = get_first_page(src_zspage); cc 2281 mm/zsmalloc.c cc.d_page = get_first_page(dst_zspage); cc 2286 mm/zsmalloc.c if (!migrate_zspage(pool, class, &cc)) cc 699 net/ipv4/inet_diag.c static int valid_cc(const void *bc, int len, int cc) cc 704 net/ipv4/inet_diag.c if (cc > len) cc 706 net/ipv4/inet_diag.c if (cc == len) cc 126 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svc_rdma_chunk_ctxt *cc) cc 128 net/sunrpc/xprtrdma/svc_rdma_rw.c cc->cc_rdma = rdma; cc 131 net/sunrpc/xprtrdma/svc_rdma_rw.c INIT_LIST_HEAD(&cc->cc_rwctxts); cc 132 net/sunrpc/xprtrdma/svc_rdma_rw.c cc->cc_sqecount = 0; cc 135 net/sunrpc/xprtrdma/svc_rdma_rw.c static void svc_rdma_cc_release(struct svc_rdma_chunk_ctxt *cc, cc 138 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svcxprt_rdma *rdma = cc->cc_rdma; cc 141 net/sunrpc/xprtrdma/svc_rdma_rw.c while ((ctxt = svc_rdma_next_ctxt(&cc->cc_rwctxts)) != NULL) { cc 205 net/sunrpc/xprtrdma/svc_rdma_rw.c struct 
svc_rdma_chunk_ctxt *cc = cc 207 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svcxprt_rdma *rdma = cc->cc_rdma; cc 209 net/sunrpc/xprtrdma/svc_rdma_rw.c container_of(cc, struct svc_rdma_write_info, wi_cc); cc 213 net/sunrpc/xprtrdma/svc_rdma_rw.c atomic_add(cc->cc_sqecount, &rdma->sc_sq_avail); cc 263 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svc_rdma_chunk_ctxt *cc = cc 265 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svcxprt_rdma *rdma = cc->cc_rdma; cc 267 net/sunrpc/xprtrdma/svc_rdma_rw.c container_of(cc, struct svc_rdma_read_info, ri_cc); cc 271 net/sunrpc/xprtrdma/svc_rdma_rw.c atomic_add(cc->cc_sqecount, &rdma->sc_sq_avail); cc 298 net/sunrpc/xprtrdma/svc_rdma_rw.c static int svc_rdma_post_chunk_ctxt(struct svc_rdma_chunk_ctxt *cc) cc 300 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svcxprt_rdma *rdma = cc->cc_rdma; cc 308 net/sunrpc/xprtrdma/svc_rdma_rw.c if (cc->cc_sqecount > rdma->sc_sq_depth) cc 312 net/sunrpc/xprtrdma/svc_rdma_rw.c cqe = &cc->cc_cqe; cc 313 net/sunrpc/xprtrdma/svc_rdma_rw.c list_for_each(tmp, &cc->cc_rwctxts) { cc 323 net/sunrpc/xprtrdma/svc_rdma_rw.c if (atomic_sub_return(cc->cc_sqecount, cc 332 net/sunrpc/xprtrdma/svc_rdma_rw.c atomic_add(cc->cc_sqecount, &rdma->sc_sq_avail); cc 334 net/sunrpc/xprtrdma/svc_rdma_rw.c atomic_read(&rdma->sc_sq_avail) > cc->cc_sqecount); cc 345 net/sunrpc/xprtrdma/svc_rdma_rw.c atomic_add(cc->cc_sqecount, &rdma->sc_sq_avail); cc 407 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svc_rdma_chunk_ctxt *cc = &info->wi_cc; cc 408 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svcxprt_rdma *rdma = cc->cc_rdma; cc 442 net/sunrpc/xprtrdma/svc_rdma_rw.c list_add(&ctxt->rw_list, &cc->cc_rwctxts); cc 443 net/sunrpc/xprtrdma/svc_rdma_rw.c cc->cc_sqecount += ret; cc 603 net/sunrpc/xprtrdma/svc_rdma_rw.c struct svc_rdma_chunk_ctxt *cc = &info->ri_cc; cc 610 net/sunrpc/xprtrdma/svc_rdma_rw.c ctxt = svc_rdma_get_rw_ctxt(cc->cc_rdma, sge_no); cc 642 net/sunrpc/xprtrdma/svc_rdma_rw.c ret = rdma_rw_ctx_init(&ctxt->rw_ctx, cc->cc_rdma->sc_qp, cc 643 net/sunrpc/xprtrdma/svc_rdma_rw.c cc->cc_rdma->sc_port_num, cc 649 net/sunrpc/xprtrdma/svc_rdma_rw.c list_add(&ctxt->rw_list, &cc->cc_rwctxts); cc 650 net/sunrpc/xprtrdma/svc_rdma_rw.c cc->cc_sqecount += ret; cc 662 net/sunrpc/xprtrdma/svc_rdma_rw.c trace_svcrdma_dma_map_rwctx(cc->cc_rdma, ret); cc 663 net/sunrpc/xprtrdma/svc_rdma_rw.c svc_rdma_put_rw_ctxt(cc->cc_rdma, ctxt); cc 1852 scripts/dtc/checks.c struct check *cc = check_table[i]; cc 1855 scripts/dtc/checks.c for (j = 0; j < cc->num_prereqs; j++) cc 1856 scripts/dtc/checks.c if (cc->prereq[j] == c) cc 1857 scripts/dtc/checks.c disable_warning_error(cc, warn, error); cc 774 sound/aoa/fabrics/layout.c struct codec_connection *cc; cc 801 sound/aoa/fabrics/layout.c cc = cci->connections; cc 802 sound/aoa/fabrics/layout.c if (!cc) cc 808 sound/aoa/fabrics/layout.c codec->fabric_data = cc; cc 810 sound/aoa/fabrics/layout.c while (cc->connected) { cc 811 sound/aoa/fabrics/layout.c codec->connected |= 1<<cc->codec_bit; cc 812 sound/aoa/fabrics/layout.c cc++; cc 897 sound/aoa/fabrics/layout.c struct codec_connection *cc; cc 904 sound/aoa/fabrics/layout.c cc = codec->fabric_data; cc 916 sound/aoa/fabrics/layout.c while (cc->connected) { cc 917 sound/aoa/fabrics/layout.c if (cc->connected & CC_SPEAKERS) { cc 924 sound/aoa/fabrics/layout.c if (cc->connected & CC_HEADPHONE) { cc 946 sound/aoa/fabrics/layout.c if (cc->connected & CC_LINEOUT) { cc 950 sound/aoa/fabrics/layout.c if (cc->connected & CC_LINEOUT_LABELLED_HEADPHONE) cc 964 sound/aoa/fabrics/layout.c if (cc->connected & 
CC_LINEOUT_LABELLED_HEADPHONE) cc 971 sound/aoa/fabrics/layout.c if (cc->connected & CC_LINEOUT_LABELLED_HEADPHONE) cc 979 sound/aoa/fabrics/layout.c cc++; cc 513 sound/hda/hdac_stream.c static u64 azx_cc_read(const struct cyclecounter *cc) cc 515 sound/hda/hdac_stream.c struct hdac_stream *azx_dev = container_of(cc, struct hdac_stream, cc); cc 524 sound/hda/hdac_stream.c struct cyclecounter *cc = &azx_dev->cc; cc 527 sound/hda/hdac_stream.c cc->read = azx_cc_read; cc 528 sound/hda/hdac_stream.c cc->mask = CLOCKSOURCE_MASK(32); cc 540 sound/hda/hdac_stream.c cc->mult = 125; /* saturation after 195 years */ cc 541 sound/hda/hdac_stream.c cc->shift = 0; cc 544 sound/hda/hdac_stream.c timecounter_init(tc, cc, nsec); cc 78 sound/isa/sb/emu8000_patch.c unsigned char cc; cc 79 sound/isa/sb/emu8000_patch.c get_user(cc, (unsigned char __user *)buf + offset); cc 80 sound/isa/sb/emu8000_patch.c c = cc << 8; /* convert 8bit -> 16bit */ cc 85 sound/isa/sb/emu8000_patch.c unsigned short cc; cc 86 sound/isa/sb/emu8000_patch.c get_user(cc, (unsigned short __user *)buf + offset); cc 87 sound/isa/sb/emu8000_patch.c c = swab16(cc); cc 125 sound/pci/asihpi/asihpi.c struct clk_cache cc; cc 2373 sound/pci/asihpi/asihpi.c struct clk_cache *clkcache = &asihpi->cc; cc 2392 sound/pci/asihpi/asihpi.c struct clk_cache *clkcache = &asihpi->cc; cc 2420 sound/pci/asihpi/asihpi.c struct clk_cache *clkcache = &asihpi->cc; cc 2527 sound/pci/asihpi/asihpi.c clkcache = &asihpi->cc; cc 7 tools/arch/x86/include/asm/rmwcc.h #define __GEN_RMWcc(fullop, var, cc, ...) \ cc 9 tools/arch/x86/include/asm/rmwcc.h asm_volatile_goto (fullop "; j" cc " %l[cc_label]" \ cc 17 tools/arch/x86/include/asm/rmwcc.h #define GEN_UNARY_RMWcc(op, var, arg0, cc) \ cc 18 tools/arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " " arg0, var, cc) cc 20 tools/arch/x86/include/asm/rmwcc.h #define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc) \ cc 21 tools/arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " %1, " arg0, var, cc, vcon (val)) cc 25 tools/arch/x86/include/asm/rmwcc.h #define __GEN_RMWcc(fullop, var, cc, ...) \ cc 28 tools/arch/x86/include/asm/rmwcc.h asm volatile (fullop "; set" cc " %1" \ cc 34 tools/arch/x86/include/asm/rmwcc.h #define GEN_UNARY_RMWcc(op, var, arg0, cc) \ cc 35 tools/arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " " arg0, var, cc) cc 37 tools/arch/x86/include/asm/rmwcc.h #define GEN_BINARY_RMWcc(op, var, vcon, val, arg0, cc) \ cc 38 tools/arch/x86/include/asm/rmwcc.h __GEN_RMWcc(op " %2, " arg0, var, cc, vcon (val)) cc 13 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c char cc[16]; /* TCP_CA_NAME_MAX */ cc 106 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c strcpy(buf.cc, "nv"); cc 114 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c optlen = sizeof(buf.cc); cc 121 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c if (strcmp(buf.cc, "cubic") != 0) { cc 123 tools/testing/selftests/bpf/prog_tests/sockopt_sk.c buf.cc, "cubic"); cc 56 virt/kvm/arm/arch_timer.c return timecounter->cc->read(timecounter->cc); cc 133 virt/kvm/arm/arch_timer.c ns = cyclecounter_cyc2ns(timecounter->cc, cc 904 virt/kvm/arm/arch_timer.c if (!timecounter->cc) {
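
The entries above for include/linux/timecounter.h, kernel/time/timecounter.c, sound/hda/hdac_stream.c and virt/kvm/arm/arch_timer.c all use the same cyclecounter/timecounter pattern: a driver describes its free-running hardware counter in a struct cyclecounter (a read callback plus mask/mult/shift) and the timecounter core converts cycle deltas into nanoseconds. The following is a minimal illustrative sketch of that wiring, not code from any of the indexed files; the my_* names and the counter register are hypothetical, and mult/shift must be chosen for the real clock rate (the hdac_stream.c entries above hardcode mult = 125, shift = 0 for their wall clock).

/*
 * Illustrative sketch only -- not taken from any file indexed above.
 * The my_* identifiers and counter_reg are hypothetical.
 */
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/clocksource.h>
#include <linux/timecounter.h>

struct my_dev {
	void __iomem *counter_reg;	/* hypothetical free-running counter */
	struct cyclecounter cc;		/* describes that counter to the core */
	struct timecounter tc;		/* accumulates cycles into nanoseconds */
};

/* .read callback: return the current raw cycle count */
static u64 my_cc_read(const struct cyclecounter *cc)
{
	struct my_dev *dev = container_of(cc, struct my_dev, cc);

	return readl(dev->counter_reg);
}

static void my_dev_clock_init(struct my_dev *dev, u64 start_ns)
{
	dev->cc.read = my_cc_read;
	dev->cc.mask = CLOCKSOURCE_MASK(32);	/* counter wraps at 32 bits */
	/*
	 * mult/shift scale cycles to ns (ns = cycles * mult >> shift);
	 * pick values that match the hardware clock rate.
	 */
	dev->cc.mult = 125;
	dev->cc.shift = 0;

	/* latches cc->read(cc) as cycle_last and starts counting from start_ns */
	timecounter_init(&dev->tc, &dev->cc, start_ns);
}

/* e.g. from a timestamping path */
static u64 my_dev_get_ns(struct my_dev *dev)
{
	return timecounter_read(&dev->tc);
}

A consumer then simply calls my_dev_get_ns() whenever it needs a timestamp: timecounter_read() re-reads the cycle counter, masks the delta against cc.mask so short counter wraps are tolerated, and accumulates the converted nanoseconds onto the running value established by timecounter_init().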