ps 9 arch/alpha/include/asm/ptrace.h #define user_mode(regs) (((regs)->ps & 8) != 0) ps 46 arch/alpha/include/uapi/asm/ptrace.h unsigned long ps; ps 90 arch/alpha/kernel/irq_alpha.c printk(KERN_CRIT "PC = %016lx PS=%04lx\n", regs->pc, regs->ps); ps 212 arch/alpha/kernel/process.c regs->ps = 8; ps 69 arch/alpha/kernel/traps.c regs->pc, regs->r26, regs->ps, print_tainted()); ps 179 arch/alpha/kernel/traps.c if (regs->ps & 8) ps 238 arch/alpha/kernel/traps.c if ((regs->ps & ~IPL_MAX) == 0) { ps 424 arch/alpha/kernel/traps.c unsigned long ps, pc, gp, a0, a1, a2; ps 605 arch/alpha/kernel/traps.c pc, una_reg(26), regs->ps); ps 321 arch/arm/mach-omap2/powerdomain.c int pwrdm_register_pwrdms(struct powerdomain **ps) ps 328 arch/arm/mach-omap2/powerdomain.c if (!ps) ps 331 arch/arm/mach-omap2/powerdomain.c for (p = ps; *p; p++) ps 469 arch/arm64/mm/hugetlbpage.c unsigned long ps = memparse(opt, &opt); ps 471 arch/arm64/mm/hugetlbpage.c switch (ps) { ps 478 arch/arm64/mm/hugetlbpage.c add_huge_page_size(ps); ps 483 arch/arm64/mm/hugetlbpage.c pr_err("hugepagesz: Unsupported page size %lu K\n", ps >> 10); ps 169 arch/ia64/include/asm/processor.h __u64 ps : 6; /* 2-7 */ ps 180 arch/ia64/include/asm/processor.h __u64 ps : 6; /* log page size */ ps 727 arch/ia64/kernel/palinfo.c unsigned long ps:6; ps 778 arch/ia64/kernel/palinfo.c pgm = -1 << (itir_reg->ps - 12); ps 791 arch/ia64/kernel/palinfo.c bitvector_process(m, 1<< itir_reg->ps); ps 687 arch/mips/include/asm/octeon/cvmx-pci-defs.h uint32_t ps:2; ps 689 arch/mips/include/asm/octeon/cvmx-pci-defs.h uint32_t ps:2; ps 20 arch/mips/math-emu/me-debugfs.c struct mips_fpu_emulator_stats *ps; ps 23 arch/mips/math-emu/me-debugfs.c ps = &per_cpu(fpuemustats, cpu); ps 24 arch/mips/math-emu/me-debugfs.c pv = (void *)ps + (unsigned long)data; ps 118 arch/powerpc/boot/addnote.c unsigned long ph, ps, np; ps 152 arch/powerpc/boot/addnote.c ps = GET_16(E_PHENTSIZE); ps 154 arch/powerpc/boot/addnote.c if (ph < E_HSIZE || ps < PH_HSIZE || np < 1) ps 156 arch/powerpc/boot/addnote.c if (ph + (np + 2) * ps + nnote + nnote2 > n) ps 165 arch/powerpc/boot/addnote.c ph += ps; ps 169 arch/powerpc/boot/addnote.c for (i = 0; i < 2 * ps + nnote + nnote2; ++i) ps 174 arch/powerpc/boot/addnote.c ns = ph + 2 * ps; ps 197 arch/powerpc/boot/addnote.c ph += ps; ps 132 arch/powerpc/kvm/book3s_64_mmu_radix.c int ret, level, ps; ps 190 arch/powerpc/kvm/book3s_64_mmu_radix.c for (ps = MMU_PAGE_4K; ps < MMU_PAGE_COUNT; ++ps) ps 191 arch/powerpc/kvm/book3s_64_mmu_radix.c if (offset == mmu_psize_defs[ps].shift) ps 193 arch/powerpc/kvm/book3s_64_mmu_radix.c gpte->page_size = ps; ps 537 arch/powerpc/mm/nohash/tlb.c unsigned int ps, sps; ps 541 arch/powerpc/mm/nohash/tlb.c ps = eptcfg & 0x1f; ps 543 arch/powerpc/mm/nohash/tlb.c if (!ps || !sps) ps 548 arch/powerpc/mm/nohash/tlb.c if (ps == (def->shift - 10)) ps 551 arch/powerpc/mm/nohash/tlb.c def->ind = ps + 10; ps 142 arch/powerpc/mm/ptdump/hashpagetable.c static int calculate_pagesize(struct pg_state *st, int ps, char s[]) ps 147 arch/powerpc/mm/ptdump/hashpagetable.c while (ps > 9 && unit[1]) { ps 148 arch/powerpc/mm/ptdump/hashpagetable.c ps -= 10; ps 151 arch/powerpc/mm/ptdump/hashpagetable.c seq_printf(st->seq, " %s_ps: %i%c\t", s, 1<<ps, *unit); ps 152 arch/powerpc/mm/ptdump/hashpagetable.c return ps; ps 19 arch/riscv/mm/hugetlbpage.c unsigned long ps = memparse(opt, &opt); ps 21 arch/riscv/mm/hugetlbpage.c if (ps == HPAGE_SIZE) { ps 23 arch/riscv/mm/hugetlbpage.c } else if (IS_ENABLED(CONFIG_64BIT) && ps == PUD_SIZE) { ps 27 
arch/riscv/mm/hugetlbpage.c pr_err("hugepagesz: Unsupported page size %lu M\n", ps >> 20); ps 21 arch/s390/include/uapi/asm/runtime_instr.h __u32 ps : 1; ps 1254 arch/s390/kernel/ptrace.c cb->ps == 1 && ps 58 arch/s390/kernel/runtime_instr.c cb->ps = 1; ps 41 arch/sparc/include/asm/ide.h u16 *ps = dst; ps 44 arch/sparc/include/asm/ide.h if(((unsigned long)ps) & 0x2) { ps 45 arch/sparc/include/asm/ide.h *ps++ = __raw_readw(port); ps 48 arch/sparc/include/asm/ide.h pi = (u32 *)ps; ps 57 arch/sparc/include/asm/ide.h ps = (u16 *)pi; ps 59 arch/sparc/include/asm/ide.h *ps++ = __raw_readw(port); ps 71 arch/sparc/include/asm/ide.h const u16 *ps = src; ps 75 arch/sparc/include/asm/ide.h __raw_writew(*ps++, port); ps 78 arch/sparc/include/asm/ide.h pi = (const u32 *)ps; ps 87 arch/sparc/include/asm/ide.h ps = (const u16 *)pi; ps 89 arch/sparc/include/asm/ide.h __raw_writew(*ps, port); ps 121 arch/sparc/lib/PeeCeeI.c u16 *ps = dst; ps 124 arch/sparc/lib/PeeCeeI.c if (((unsigned long)ps) & 0x2) { ps 125 arch/sparc/lib/PeeCeeI.c *ps++ = __raw_readw(addr); ps 128 arch/sparc/lib/PeeCeeI.c pi = (u32 *)ps; ps 137 arch/sparc/lib/PeeCeeI.c ps = (u16 *)pi; ps 139 arch/sparc/lib/PeeCeeI.c *ps = __raw_readw(addr); ps 155 arch/sparc/lib/PeeCeeI.c u16 *ps; ps 160 arch/sparc/lib/PeeCeeI.c ps = dst; ps 163 arch/sparc/lib/PeeCeeI.c *ps++ = l; ps 164 arch/sparc/lib/PeeCeeI.c pi = (u32 *)ps; ps 170 arch/sparc/lib/PeeCeeI.c ps = (u16 *)pi; ps 171 arch/sparc/lib/PeeCeeI.c *ps = l; ps 179 arch/sparc/lib/PeeCeeI.c ps = (u16 *)pb; ps 180 arch/sparc/lib/PeeCeeI.c *ps++ = ((l >> 8) & 0xffff); ps 181 arch/sparc/lib/PeeCeeI.c pi = (u32 *)ps; ps 202 arch/sparc/lib/PeeCeeI.c ps = (u16 *)pi; ps 203 arch/sparc/lib/PeeCeeI.c *ps++ = ((l >> 8) & 0xffff); ps 204 arch/sparc/lib/PeeCeeI.c pb = (u8 *)ps; ps 383 arch/x86/kernel/amd_gart_64.c struct scatterlist *s, *ps, *start_sg, *sgmap; ps 398 arch/x86/kernel/amd_gart_64.c ps = NULL; /* shut up gcc */ ps 417 arch/x86/kernel/amd_gart_64.c (ps->offset + ps->length) % PAGE_SIZE) { ps 434 arch/x86/kernel/amd_gart_64.c ps = s; ps 115 arch/x86/kernel/irq_32.c struct page *ph, *ps; ps 123 arch/x86/kernel/irq_32.c ps = alloc_pages_node(node, THREADINFO_GFP, THREAD_SIZE_ORDER); ps 124 arch/x86/kernel/irq_32.c if (!ps) { ps 130 arch/x86/kernel/irq_32.c per_cpu(softirq_stack_ptr, cpu) = page_address(ps); ps 86 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = &pit->pit_state; ps 88 arch/x86/kvm/i8254.c if (!ps->period) ps 100 arch/x86/kvm/i8254.c remaining = hrtimer_get_remaining(&ps->timer); ps 101 arch/x86/kvm/i8254.c elapsed = ps->period - ktime_to_ns(remaining); ps 198 arch/x86/kvm/i8254.c static inline struct kvm_pit *pit_state_to_pit(struct kvm_kpit_state *ps) ps 200 arch/x86/kvm/i8254.c return container_of(ps, struct kvm_pit, pit_state); ps 205 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = container_of(kian, struct kvm_kpit_state, ps 207 arch/x86/kvm/i8254.c struct kvm_pit *pit = pit_state_to_pit(ps); ps 209 arch/x86/kvm/i8254.c atomic_set(&ps->irq_ack, 1); ps 214 arch/x86/kvm/i8254.c if (atomic_dec_if_positive(&ps->pending) > 0) ps 245 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = &pit->pit_state; ps 247 arch/x86/kvm/i8254.c if (atomic_read(&ps->reinject) && !atomic_xchg(&ps->irq_ack, 0)) ps 269 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = container_of(data, struct kvm_kpit_state, timer); ps 270 arch/x86/kvm/i8254.c struct kvm_pit *pt = pit_state_to_pit(ps); ps 272 arch/x86/kvm/i8254.c if (atomic_read(&ps->reinject)) ps 273 arch/x86/kvm/i8254.c atomic_inc(&ps->pending); ps 277 
arch/x86/kvm/i8254.c if (ps->is_periodic) { ps 278 arch/x86/kvm/i8254.c hrtimer_add_expires_ns(&ps->timer, ps->period); ps 292 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = &pit->pit_state; ps 295 arch/x86/kvm/i8254.c if (atomic_read(&ps->reinject) == reinject) ps 301 arch/x86/kvm/i8254.c kvm_register_irq_ack_notifier(kvm, &ps->irq_ack_notifier); ps 304 arch/x86/kvm/i8254.c kvm_unregister_irq_ack_notifier(kvm, &ps->irq_ack_notifier); ps 308 arch/x86/kvm/i8254.c atomic_set(&ps->reinject, reinject); ps 313 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = &pit->pit_state; ps 318 arch/x86/kvm/i8254.c ps->flags & KVM_PIT_FLAGS_HPET_LEGACY) ps 326 arch/x86/kvm/i8254.c hrtimer_cancel(&ps->timer); ps 328 arch/x86/kvm/i8254.c ps->period = interval; ps 329 arch/x86/kvm/i8254.c ps->is_periodic = is_period; ps 338 arch/x86/kvm/i8254.c if (ps->is_periodic) { ps 341 arch/x86/kvm/i8254.c if (ps->period < min_period) { ps 345 arch/x86/kvm/i8254.c ps->period, min_period); ps 346 arch/x86/kvm/i8254.c ps->period = min_period; ps 350 arch/x86/kvm/i8254.c hrtimer_start(&ps->timer, ktime_add_ns(ktime_get(), interval), ps 356 arch/x86/kvm/i8254.c struct kvm_kpit_state *ps = &pit->pit_state; ps 367 arch/x86/kvm/i8254.c ps->channels[channel].count = val; ps 370 arch/x86/kvm/i8254.c ps->channels[channel].count_load_time = ktime_get(); ps 376 arch/x86/kvm/i8254.c switch (ps->channels[0].mode) { ps 4650 arch/x86/kvm/x86.c static int kvm_vm_ioctl_get_pit(struct kvm *kvm, struct kvm_pit_state *ps) ps 4654 arch/x86/kvm/x86.c BUILD_BUG_ON(sizeof(*ps) != sizeof(kps->channels)); ps 4657 arch/x86/kvm/x86.c memcpy(ps, &kps->channels, sizeof(*ps)); ps 4662 arch/x86/kvm/x86.c static int kvm_vm_ioctl_set_pit(struct kvm *kvm, struct kvm_pit_state *ps) ps 4668 arch/x86/kvm/x86.c memcpy(&pit->pit_state.channels, ps, sizeof(*ps)); ps 4670 arch/x86/kvm/x86.c kvm_pit_load_count(pit, i, ps->channels[i].count, 0); ps 4675 arch/x86/kvm/x86.c static int kvm_vm_ioctl_get_pit2(struct kvm *kvm, struct kvm_pit_state2 *ps) ps 4678 arch/x86/kvm/x86.c memcpy(ps->channels, &kvm->arch.vpit->pit_state.channels, ps 4679 arch/x86/kvm/x86.c sizeof(ps->channels)); ps 4680 arch/x86/kvm/x86.c ps->flags = kvm->arch.vpit->pit_state.flags; ps 4682 arch/x86/kvm/x86.c memset(&ps->reserved, 0, sizeof(ps->reserved)); ps 4686 arch/x86/kvm/x86.c static int kvm_vm_ioctl_set_pit2(struct kvm *kvm, struct kvm_pit_state2 *ps) ps 4695 arch/x86/kvm/x86.c cur_legacy = ps->flags & KVM_PIT_FLAGS_HPET_LEGACY; ps 4698 arch/x86/kvm/x86.c memcpy(&pit->pit_state.channels, &ps->channels, ps 4700 arch/x86/kvm/x86.c pit->pit_state.flags = ps->flags; ps 4902 arch/x86/kvm/x86.c struct kvm_pit_state ps; ps 5033 arch/x86/kvm/x86.c if (copy_from_user(&u.ps, argp, sizeof(struct kvm_pit_state))) ps 5038 arch/x86/kvm/x86.c r = kvm_vm_ioctl_get_pit(kvm, &u.ps); ps 5042 arch/x86/kvm/x86.c if (copy_to_user(argp, &u.ps, sizeof(struct kvm_pit_state))) ps 5049 arch/x86/kvm/x86.c if (copy_from_user(&u.ps, argp, sizeof(u.ps))) ps 5054 arch/x86/kvm/x86.c r = kvm_vm_ioctl_set_pit(kvm, &u.ps); ps 191 arch/x86/mm/hugetlbpage.c unsigned long ps = memparse(opt, &opt); ps 192 arch/x86/mm/hugetlbpage.c if (ps == PMD_SIZE) { ps 194 arch/x86/mm/hugetlbpage.c } else if (ps == PUD_SIZE && boot_cpu_has(X86_FEATURE_GBPAGES)) { ps 199 arch/x86/mm/hugetlbpage.c ps >> 20); ps 200 arch/xtensa/include/asm/processor.h (regs)->ps = USER_PS_VALUE; \ ps 54 arch/xtensa/include/asm/ptrace.h unsigned long ps; /* 8 */ ps 88 arch/xtensa/include/asm/ptrace.h # define user_mode(regs) (((regs)->ps & 0x00000020)!=0) ps 45 
arch/xtensa/include/uapi/asm/ptrace.h __u32 ps; ps 33 arch/xtensa/kernel/asm-offsets.c DEFINE(PT_PS, offsetof (struct pt_regs, ps)); ps 47 arch/xtensa/kernel/ptrace.c .ps = regs->ps & ~(1 << PS_EXCM_BIT), ps 87 arch/xtensa/kernel/ptrace.c regs->ps = (regs->ps & ~ps_mask) | (newregs.ps & ps_mask); ps 296 arch/xtensa/kernel/ptrace.c tmp = (regs->ps & ~(1 << PS_EXCM_BIT)); ps 142 arch/xtensa/kernel/signal.c COPY(ps); ps 178 arch/xtensa/kernel/signal.c unsigned long ps; ps 200 arch/xtensa/kernel/signal.c err |= __get_user(ps, &sc->sc_ps); ps 201 arch/xtensa/kernel/signal.c regs->ps = (regs->ps & ~PS_CALLINC_MASK) | (ps & PS_CALLINC_MASK); ps 338 arch/xtensa/kernel/signal.c unsigned long sp, ra, tp, ps; ps 389 arch/xtensa/kernel/signal.c ps = regs->ps; ps 394 arch/xtensa/kernel/signal.c if (ps & PS_WOE_MASK) { ps 398 arch/xtensa/kernel/signal.c ps = (ps & ~(PS_CALLINC_MASK | PS_OWB_MASK)) | ps 408 arch/xtensa/kernel/signal.c regs->ps = ps; ps 49 arch/xtensa/kernel/stacktrace.c !(regs->ps & PS_WOE_MASK))) ps 245 arch/xtensa/kernel/traps.c if ((regs->ps & PS_INTLEVEL_MASK) < LOCKLEVEL) ps 471 arch/xtensa/kernel/traps.c regs->pc, regs->ps, regs->depc, regs->excvaddr); ps 128 block/blk-iolatency.c struct percentile_stats ps; ps 196 block/blk-iolatency.c stat->ps.total = 0; ps 197 block/blk-iolatency.c stat->ps.missed = 0; ps 207 block/blk-iolatency.c sum->ps.total += stat->ps.total; ps 208 block/blk-iolatency.c sum->ps.missed += stat->ps.missed; ps 219 block/blk-iolatency.c stat->ps.missed++; ps 220 block/blk-iolatency.c stat->ps.total++; ps 230 block/blk-iolatency.c u64 thresh = div64_u64(stat->ps.total, 10); ps 232 block/blk-iolatency.c return stat->ps.missed < thresh; ps 241 block/blk-iolatency.c return stat->ps.total; ps 905 block/blk-iolatency.c (unsigned long long)stat.ps.missed, ps 906 block/blk-iolatency.c (unsigned long long)stat.ps.total); ps 908 block/blk-iolatency.c (unsigned long long)stat.ps.missed, ps 909 block/blk-iolatency.c (unsigned long long)stat.ps.total, ps 460 drivers/acpi/scan.c struct acpi_device_power_state *ps = &device->power.states[i]; ps 461 drivers/acpi/scan.c acpi_power_resources_list_free(&ps->resources); ps 908 drivers/acpi/scan.c struct acpi_device_power_state *ps = &device->power.states[state]; ps 913 drivers/acpi/scan.c INIT_LIST_HEAD(&ps->resources); ps 923 drivers/acpi/scan.c acpi_extract_power_resources(package, 0, &ps->resources); ps 931 drivers/acpi/scan.c ps->flags.explicit_set = 1; ps 934 drivers/acpi/scan.c if (!list_empty(&ps->resources) || ps->flags.explicit_set) ps 935 drivers/acpi/scan.c ps->flags.valid = 1; ps 937 drivers/acpi/scan.c ps->power = -1; /* Unknown - driver assigned */ ps 938 drivers/acpi/scan.c ps->latency = -1; /* Unknown - driver assigned */ ps 4356 drivers/block/drbd/drbd_receiver.c static union drbd_state convert_state(union drbd_state ps) ps 4371 drivers/block/drbd/drbd_receiver.c ms.i = ps.i; ps 4373 drivers/block/drbd/drbd_receiver.c ms.conn = c_tab[ps.conn]; ps 4374 drivers/block/drbd/drbd_receiver.c ms.peer = ps.role; ps 4375 drivers/block/drbd/drbd_receiver.c ms.role = ps.peer; ps 4376 drivers/block/drbd/drbd_receiver.c ms.pdsk = ps.disk; ps 4377 drivers/block/drbd/drbd_receiver.c ms.disk = ps.pdsk; ps 4378 drivers/block/drbd/drbd_receiver.c ms.peer_isp = (ps.aftr_isp | ps.user_isp); ps 28 drivers/clk/bcm/clk-cygnus.c #define AON_VAL(o, pw, ps, is) { .offset = o, .pwr_width = pw, \ ps 29 drivers/clk/bcm/clk-cygnus.c .pwr_shift = ps, .iso_shift = is } ps 26 drivers/clk/bcm/clk-ns2.c #define AON_VAL(o, pw, ps, is) { .offset = o, 
.pwr_width = pw, \ ps 27 drivers/clk/bcm/clk-ns2.c .pwr_shift = ps, .iso_shift = is } ps 26 drivers/clk/bcm/clk-nsp.c #define AON_VAL(o, pw, ps, is) { .offset = o, .pwr_width = pw, \ ps 27 drivers/clk/bcm/clk-nsp.c .pwr_shift = ps, .iso_shift = is } ps 16 drivers/clk/bcm/clk-sr.c #define AON_VAL(o, pw, ps, is) { .offset = o, .pwr_width = pw, \ ps 17 drivers/clk/bcm/clk-sr.c .pwr_shift = ps, .iso_shift = is } ps 26 drivers/clocksource/timer-fsl-ftm.c unsigned long ps; ps 55 drivers/clocksource/timer-fsl-ftm.c val |= priv->ps | FTM_SC_CLK(1); ps 205 drivers/clocksource/timer-fsl-ftm.c freq / (1 << priv->ps), ps 222 drivers/clocksource/timer-fsl-ftm.c sched_clock_register(ftm_read_sched_clock, 16, freq / (1 << priv->ps)); ps 224 drivers/clocksource/timer-fsl-ftm.c freq / (1 << priv->ps), 300, 16, ps 284 drivers/clocksource/timer-fsl-ftm.c priv->ps = 0; ps 292 drivers/clocksource/timer-fsl-ftm.c HZ * (1 << priv->ps++)); ps 295 drivers/clocksource/timer-fsl-ftm.c if (priv->ps > FTM_PS_MAX) { ps 297 drivers/clocksource/timer-fsl-ftm.c priv->ps, FTM_PS_MAX); ps 283 drivers/dma/imx-sdma.c u32 ps; ps 268 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.h #define amdgpu_dpm_print_power_state(adev, ps) \ ps 269 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.h ((adev)->powerplay.pp_funcs->print_power_state((adev)->powerplay.pp_handle, (ps))) ps 383 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.h struct amdgpu_ps *ps; ps 2463 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c struct amdgpu_ps *ps; ps 2486 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c ps = &adev->pm.dpm.ps[i]; ps 2487 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c ui_class = ps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK; ps 2492 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY) { ps 2494 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2496 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2501 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY) { ps 2503 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2505 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2510 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY) { ps 2512 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2514 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2524 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_SDSTATE) ps 2525 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2528 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_HDSTATE) ps 2529 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2532 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_HD2STATE) ps 2533 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2536 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class2 & ATOM_PPLIB_CLASSIFICATION2_MVC) ps 2537 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2542 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_THERMAL) ps 2543 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2546 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) ps 2547 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2550 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) ps 2551 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2554 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_3DPERFORMANCE) ps 2555 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c return ps; ps 2595 
drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c struct amdgpu_ps *ps; ps 2612 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c ps = amdgpu_dpm_pick_power_state(adev, dpm_state); ps 2613 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c if (ps) ps 2614 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c adev->pm.dpm.requested_ps = ps; ps 2626 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c ps->vce_active = adev->pm.dpm.vce_active; ps 2713 drivers/gpu/drm/amd/amdgpu/amdgpu_pm.c amdgpu_dpm_print_power_state(adev, &adev->pm.dpm.ps[i]); ps 59 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t *ps, *ws; ps 221 drivers/gpu/drm/amd/amdgpu/atom.c val = get_unaligned_le32((u32 *)&ctx->ps[idx]); ps 494 drivers/gpu/drm/amd/amdgpu/atom.c ctx->ps[idx] = cpu_to_le32(val); ps 623 drivers/gpu/drm/amd/amdgpu/atom.c r = amdgpu_atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift); ps 1204 drivers/gpu/drm/amd/amdgpu/atom.c int len, ws, ps, ptr; ps 1214 drivers/gpu/drm/amd/amdgpu/atom.c ps = CU8(base + ATOM_CT_PS_PTR) & ATOM_CT_PS_MASK; ps 1217 drivers/gpu/drm/amd/amdgpu/atom.c SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps); ps 1220 drivers/gpu/drm/amd/amdgpu/atom.c ectx.ps_shift = ps / 4; ps 1222 drivers/gpu/drm/amd/amdgpu/atom.c ectx.ps = params; ps 1239 drivers/gpu/drm/amd/amdgpu/atom.c base, len, ws, ps, ptr - 1); ps 1360 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t ps[16]; ps 1363 drivers/gpu/drm/amd/amdgpu/atom.c memset(ps, 0, 64); ps 1365 drivers/gpu/drm/amd/amdgpu/atom.c ps[0] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFSCLK_PTR)); ps 1366 drivers/gpu/drm/amd/amdgpu/atom.c ps[1] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFMCLK_PTR)); ps 1367 drivers/gpu/drm/amd/amdgpu/atom.c if (!ps[0] || !ps[1]) ps 1372 drivers/gpu/drm/amd/amdgpu/atom.c ret = amdgpu_atom_execute_table(ctx, ATOM_CMD_INIT, ps); ps 1376 drivers/gpu/drm/amd/amdgpu/atom.c memset(ps, 0, 64); ps 372 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps = rps->ps_priv; ps 374 drivers/gpu/drm/amd/amdgpu/kv_dpm.c return ps; ps 2208 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps = kv_get_ps(new_rps); ps 2252 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->need_dfs_bypass = true; ps 2254 drivers/gpu/drm/amd/amdgpu/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2255 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if (ps->levels[i].sclk < sclk) ps 2256 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->levels[i].sclk = sclk; ps 2260 drivers/gpu/drm/amd/amdgpu/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2263 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_convert_8bit_index_to_voltage(adev, ps->levels[i].vddc_index))) { ps 2265 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->levels[i].sclk = table->entries[limit].clk; ps 2272 drivers/gpu/drm/amd/amdgpu/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2275 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_convert_8bit_index_to_voltage(adev, ps->levels[i].vddc_index))) { ps 2277 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->levels[i].sclk = table->entries[limit].sclk_frequency; ps 2283 drivers/gpu/drm/amd/amdgpu/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2284 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->levels[i].sclk = stable_p_state_sclk; ps 2298 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpm0_pg_nb_ps_lo = 0x1; ps 2299 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpm0_pg_nb_ps_hi = 0x0; ps 2300 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpmx_nb_ps_lo = 0x1; ps 2301 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpmx_nb_ps_hi = 0x0; ps 2303 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpm0_pg_nb_ps_lo = 0x3; ps 2304 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpm0_pg_nb_ps_hi = 0x0; ps 2305 
drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpmx_nb_ps_lo = 0x3; ps 2306 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpmx_nb_ps_hi = 0x0; ps 2312 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpm0_pg_nb_ps_lo = force_high ? 0x2 : 0x3; ps 2313 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpm0_pg_nb_ps_hi = 0x2; ps 2314 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpmx_nb_ps_lo = force_high ? 0x2 : 0x3; ps 2315 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->dpmx_nb_ps_hi = 0x2; ps 2644 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps) ps 2648 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->num_levels = 1; ps 2649 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->levels[0] = pi->boot_pl; ps 2657 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps = kv_get_ps(rps); ps 2673 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_patch_boot_state(adev, ps); ps 2684 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps = kv_get_ps(rps); ps 2685 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_pl *pl = &ps->levels[index]; ps 2693 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps->num_levels = index + 1; ps 2716 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps; ps 2735 drivers/gpu/drm/amd/amdgpu/kv_dpm.c adev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 2738 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if (!adev->pm.dpm.ps) ps 2747 drivers/gpu/drm/amd/amdgpu/kv_dpm.c ps = kzalloc(sizeof(struct kv_ps), GFP_KERNEL); ps 2748 drivers/gpu/drm/amd/amdgpu/kv_dpm.c if (ps == NULL) { ps 2749 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kfree(adev->pm.dpm.ps); ps 2752 drivers/gpu/drm/amd/amdgpu/kv_dpm.c adev->pm.dpm.ps[i].ps_priv = ps; ps 2765 drivers/gpu/drm/amd/amdgpu/kv_dpm.c &adev->pm.dpm.ps[i], k, ps 2769 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kv_parse_pplib_non_clock_info(adev, &adev->pm.dpm.ps[i], ps 2896 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_ps *ps = kv_get_ps(rps); ps 2902 drivers/gpu/drm/amd/amdgpu/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2903 drivers/gpu/drm/amd/amdgpu/kv_dpm.c struct kv_pl *pl = &ps->levels[i]; ps 2916 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kfree(adev->pm.dpm.ps[i].ps_priv); ps 2918 drivers/gpu/drm/amd/amdgpu/kv_dpm.c kfree(adev->pm.dpm.ps); ps 1975 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = aps->ps_priv; ps 1977 drivers/gpu/drm/amd/amdgpu/si_dpm.c return ps; ps 3430 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(rps); ps 3490 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = ps->performance_level_count - 2; i >= 0; i--) { ps 3491 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddc > ps->performance_levels[i+1].vddc) ps 3492 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddc = ps->performance_levels[i+1].vddc; ps 3495 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3496 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].mclk > max_limits->mclk) ps 3497 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk = max_limits->mclk; ps 3498 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].sclk > max_limits->sclk) ps 3499 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].sclk = max_limits->sclk; ps 3500 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddc > max_limits->vddc) ps 3501 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddc = max_limits->vddc; ps 3502 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddci > max_limits->vddci) ps 3503 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddci = max_limits->vddci; ps 3515 
drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3517 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].sclk > max_sclk_vddc) ps 3518 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].sclk = max_sclk_vddc; ps 3521 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].mclk > max_mclk_vddci) ps 3522 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk = max_mclk_vddci; ps 3525 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].mclk > max_mclk_vddc) ps 3526 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk = max_mclk_vddc; ps 3529 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].mclk > max_mclk) ps 3530 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk = max_mclk; ps 3533 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].sclk > max_sclk) ps 3534 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].sclk = max_sclk; ps 3541 drivers/gpu/drm/amd/amdgpu/si_dpm.c mclk = ps->performance_levels[ps->performance_level_count - 1].mclk; ps 3542 drivers/gpu/drm/amd/amdgpu/si_dpm.c vddci = ps->performance_levels[ps->performance_level_count - 1].vddci; ps 3544 drivers/gpu/drm/amd/amdgpu/si_dpm.c mclk = ps->performance_levels[0].mclk; ps 3545 drivers/gpu/drm/amd/amdgpu/si_dpm.c vddci = ps->performance_levels[0].vddci; ps 3549 drivers/gpu/drm/amd/amdgpu/si_dpm.c sclk = ps->performance_levels[ps->performance_level_count - 1].sclk; ps 3550 drivers/gpu/drm/amd/amdgpu/si_dpm.c vddc = ps->performance_levels[ps->performance_level_count - 1].vddc; ps 3552 drivers/gpu/drm/amd/amdgpu/si_dpm.c sclk = ps->performance_levels[0].sclk; ps 3553 drivers/gpu/drm/amd/amdgpu/si_dpm.c vddc = ps->performance_levels[0].vddc; ps 3564 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[0].sclk = sclk; ps 3565 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[0].mclk = mclk; ps 3566 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[0].vddc = vddc; ps 3567 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[0].vddci = vddci; ps 3570 drivers/gpu/drm/amd/amdgpu/si_dpm.c sclk = ps->performance_levels[0].sclk; ps 3571 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3572 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (sclk < ps->performance_levels[i].sclk) ps 3573 drivers/gpu/drm/amd/amdgpu/si_dpm.c sclk = ps->performance_levels[i].sclk; ps 3575 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3576 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].sclk = sclk; ps 3577 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddc = vddc; ps 3580 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3581 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk) ps 3582 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk; ps 3583 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc) ps 3584 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc; ps 3589 drivers/gpu/drm/amd/amdgpu/si_dpm.c mclk = ps->performance_levels[0].mclk; ps 3590 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3591 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (mclk < 
ps->performance_levels[i].mclk) ps 3592 drivers/gpu/drm/amd/amdgpu/si_dpm.c mclk = ps->performance_levels[i].mclk; ps 3594 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3595 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk = mclk; ps 3596 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddci = vddci; ps 3599 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3600 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk) ps 3601 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk; ps 3602 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci) ps 3603 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci; ps 3607 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) ps 3609 drivers/gpu/drm/amd/amdgpu/si_dpm.c &ps->performance_levels[i]); ps 3611 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3612 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddc < min_vce_voltage) ps 3613 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].vddc = min_vce_voltage; ps 3615 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].sclk, ps 3616 drivers/gpu/drm/amd/amdgpu/si_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 3618 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk, ps 3619 drivers/gpu/drm/amd/amdgpu/si_dpm.c max_limits->vddci, &ps->performance_levels[i].vddci); ps 3621 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_levels[i].mclk, ps 3622 drivers/gpu/drm/amd/amdgpu/si_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 3625 drivers/gpu/drm/amd/amdgpu/si_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 3628 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3631 drivers/gpu/drm/amd/amdgpu/si_dpm.c &ps->performance_levels[i].vddc, ps 3632 drivers/gpu/drm/amd/amdgpu/si_dpm.c &ps->performance_levels[i].vddci); ps 3635 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->dc_compatible = true; ps 3636 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3637 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps->performance_levels[i].vddc > adev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) ps 3638 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->dc_compatible = false; ps 3865 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(rps); ps 3866 drivers/gpu/drm/amd/amdgpu/si_dpm.c u32 levels = ps->performance_level_count; ps 5428 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(amdgpu_state); ps 5432 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count - 1; i++) ps 5435 drivers/gpu/drm/amd/amdgpu/si_dpm.c smc_state->levels[ps->performance_level_count - 1].bSP = ps 7139 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(rps); ps 7141 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct rv7xx_pl *pl = &ps->performance_levels[index]; ps 7144 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps->performance_level_count = index + 1; ps 7228 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps; ps 7247 drivers/gpu/drm/amd/amdgpu/si_dpm.c adev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 7250 drivers/gpu/drm/amd/amdgpu/si_dpm.c if 
(!adev->pm.dpm.ps) ps 7259 drivers/gpu/drm/amd/amdgpu/si_dpm.c ps = kzalloc(sizeof(struct si_ps), GFP_KERNEL); ps 7260 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (ps == NULL) { ps 7261 drivers/gpu/drm/amd/amdgpu/si_dpm.c kfree(adev->pm.dpm.ps); ps 7264 drivers/gpu/drm/amd/amdgpu/si_dpm.c adev->pm.dpm.ps[i].ps_priv = ps; ps 7265 drivers/gpu/drm/amd/amdgpu/si_dpm.c si_parse_pplib_non_clock_info(adev, &adev->pm.dpm.ps[i], ps 7280 drivers/gpu/drm/amd/amdgpu/si_dpm.c &adev->pm.dpm.ps[i], k, ps 7473 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (adev->pm.dpm.ps) ps 7475 drivers/gpu/drm/amd/amdgpu/si_dpm.c kfree(adev->pm.dpm.ps[i].ps_priv); ps 7476 drivers/gpu/drm/amd/amdgpu/si_dpm.c kfree(adev->pm.dpm.ps); ps 7488 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(rps); ps 7494 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (current_index >= ps->performance_level_count) { ps 7497 drivers/gpu/drm/amd/amdgpu/si_dpm.c pl = &ps->performance_levels[current_index]; ps 7899 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(rps); ps 7906 drivers/gpu/drm/amd/amdgpu/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 7907 drivers/gpu/drm/amd/amdgpu/si_dpm.c pl = &ps->performance_levels[i]; ps 7989 drivers/gpu/drm/amd/amdgpu/si_dpm.c struct si_ps *ps = si_get_ps(rps); ps 8001 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (pl_index < ps->performance_level_count) { ps 8002 drivers/gpu/drm/amd/amdgpu/si_dpm.c sclk = ps->performance_levels[pl_index].sclk; ps 8009 drivers/gpu/drm/amd/amdgpu/si_dpm.c if (pl_index < ps->performance_level_count) { ps 8010 drivers/gpu/drm/amd/amdgpu/si_dpm.c mclk = ps->performance_levels[pl_index].mclk; ps 200 drivers/gpu/drm/amd/display/dc/bios/command_table2.c struct dig_transmitter_control_ps_allocation_v1_6 ps = { { 0 } }; ps 202 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.phyid = cmd->phy_id_to_atom(cntl->transmitter); ps 203 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.action = (uint8_t)cntl->action; ps 206 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.mode_laneset.dplaneset = (uint8_t)cntl->lane_settings; ps 208 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.mode_laneset.digmode = ps 211 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.lanenum = (uint8_t)cntl->lanes_number; ps 212 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.hpdsel = cmd->hpd_sel_to_atom(cntl->hpd_sel); ps 213 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.digfe_sel = cmd->dig_encoder_sel_to_atom(cntl->engine_id); ps 214 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.connobj_id = (uint8_t)cntl->connector_obj_id.id; ps 215 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.symclk_10khz = cntl->pixel_clock/10; ps 222 drivers/gpu/drm/amd/display/dc/bios/command_table2.c __func__, ps.param.symclk_10khz); ps 227 drivers/gpu/drm/amd/display/dc/bios/command_table2.c if (EXEC_BIOS_CMD_TABLE(dig1transmittercontrol, ps)) ps 596 drivers/gpu/drm/amd/display/dc/bios/command_table2.c struct enable_disp_power_gating_ps_allocation ps = { { 0 } }; ps 600 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.disp_pipe_id = atom_crtc_id; ps 604 drivers/gpu/drm/amd/display/dc/bios/command_table2.c ps.param.enable = ps 607 drivers/gpu/drm/amd/display/dc/bios/command_table2.c if (EXEC_BIOS_CMD_TABLE(enabledisppowergating, ps.param)) ps 234 drivers/gpu/drm/amd/include/kgd_pp_interface.h void (*print_power_state)(void *handle, void *ps); ps 614 
drivers/gpu/drm/amd/powerplay/amd_powerplay.c if (!hwmgr || !hwmgr->pm_en ||!hwmgr->ps) ps 623 drivers/gpu/drm/amd/powerplay/amd_powerplay.c ((unsigned long)hwmgr->ps + i * hwmgr->ps_size); ps 53 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c hwmgr->ps = kcalloc(table_entries, size, GFP_KERNEL); ps 54 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (hwmgr->ps == NULL) ps 59 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c kfree(hwmgr->ps); ps 60 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c hwmgr->ps = NULL; ps 67 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c kfree(hwmgr->ps); ps 69 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c hwmgr->ps = NULL; ps 73 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c state = hwmgr->ps; ps 99 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (!hwmgr->ps) ps 104 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c kfree(hwmgr->ps); ps 106 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c hwmgr->ps = NULL; ps 120 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c state = hwmgr->ps; ps 141 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c state = hwmgr->ps; ps 161 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c state = hwmgr->ps; ps 178 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (!hwmgr->ps) ps 193 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (!hwmgr->ps) ps 210 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (!hwmgr->ps) ps 214 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c *state = hwmgr->ps; ps 268 drivers/gpu/drm/amd/powerplay/hwmgr/pp_psm.c if (hwmgr->ps) ps 679 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c struct pp_power_state *ps, ps 685 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->classification.ui_label = (le16_to_cpu(pnon_clock_info->usClassification) & ps 687 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->classification.flags = make_classification_flags(hwmgr, ps 691 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->classification.temporary_state = false; ps 692 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->classification.to_be_deleted = false; ps 696 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->validation.singleDisplayOnly = (0 != tmp); ps 701 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->validation.disallowOnDC = (0 != tmp); ps 703 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->pcie.lanes = ((le32_to_cpu(pnon_clock_info->ulCapsAndSettings) & ps 707 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->pcie.lanes = 0; ps 709 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.disableFrameModulation = false; ps 719 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.refreshrateSource = PP_RefreshrateSource_Explicit; ps 720 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.explicitRefreshrate = look_up[rrr_index]; ps 721 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.limitRefreshrate = true; ps 723 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c if (ps->display.explicitRefreshrate == 0) ps 724 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.limitRefreshrate = false; ps 726 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.limitRefreshrate = false; ps 731 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->display.enableVariBright = (0 != tmp); ps 736 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->memory.dllOff = (0 != tmp); ps 738 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->memory.m3arb = (le32_to_cpu(pnon_clock_info->ulCapsAndSettings) & ps 741 
drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->temperatures.min = PP_TEMPERATURE_UNITS_PER_CENTIGRADES * ps 744 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->temperatures.max = PP_TEMPERATURE_UNITS_PER_CENTIGRADES * ps 750 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->software.disableLoadBalancing = tmp; ps 755 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->software.enableSleepForTimestamps = (0 != tmp); ps 757 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->validation.supportedPowerLevels = pnon_clock_info->ucRequiredPower; ps 760 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->uvd_clocks.VCLK = le32_to_cpu(pnon_clock_info->ulVCLK); ps 761 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->uvd_clocks.DCLK = le32_to_cpu(pnon_clock_info->ulDCLK); ps 763 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->uvd_clocks.VCLK = 0; ps 764 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->uvd_clocks.DCLK = 0; ps 887 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c struct pp_power_state *ps, ps 907 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c ps->classification.bios_index = entry_index; ps 926 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c result = init_non_clock_fields(hwmgr, ps, pnon_clock_arrays->ucEntrySize, pnon_clock_info); ps 932 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c res = func(hwmgr, &ps->hardware, i, pclock_info); ps 949 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c result = init_non_clock_fields(hwmgr, ps, ps 959 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c int res = func(hwmgr, &ps->hardware, i, pclock_info); ps 966 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c if ((0 == result) && (0 != (ps->classification.flags & PP_StateClassificationFlag_Boot))) { ps 968 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.c result = hwmgr->hwmgr_func->patch_boot_state(hwmgr, &(ps->hardware)); ps 44 drivers/gpu/drm/amd/powerplay/hwmgr/processpptables.h struct pp_power_state *ps, ps 779 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c unsigned long entry, struct pp_power_state *ps) ps 784 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c ps->hardware.magic = SMU10_Magic; ps 786 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c smu10_ps = cast_smu10_ps(&(ps->hardware)); ps 788 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c result = pp_tables_get_entry(hwmgr, entry, ps, ps 791 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c smu10_ps->uvd_clocks.vclk = ps->uvd_clocks.VCLK; ps 792 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c smu10_ps->uvd_clocks.dclk = ps->uvd_clocks.DCLK; ps 959 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c const struct smu10_power_state *ps = cast_const_smu10_ps(state); ps 961 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c clock_info->min_eng_clk = ps->levels[0].engine_clock / (1 << (ps->levels[0].ss_divider_index)); ps 962 drivers/gpu/drm/amd/powerplay/hwmgr/smu10_hwmgr.c clock_info->max_eng_clk = ps->levels[ps->level - 1].engine_clock / (1 << (ps->levels[ps->level - 1].ss_divider_index)); ps 3016 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct pp_power_state *ps; ps 3022 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = hwmgr->request_ps; ps 3024 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c if (ps == NULL) ps 3027 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c smu7_ps = cast_phw_smu7_power_state(&ps->hardware); ps 3038 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct pp_power_state *ps; ps 3044 
drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = hwmgr->request_ps; ps 3046 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c if (ps == NULL) ps 3049 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c smu7_ps = cast_phw_smu7_power_state(&ps->hardware); ps 3062 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct smu7_power_state *ps = (struct smu7_power_state *)hw_ps; ps 3095 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[0].memory_clock = data->vbios_boot_state.mclk_bootup_value; ps 3096 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[0].engine_clock = data->vbios_boot_state.sclk_bootup_value; ps 3097 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[0].pcie_gen = data->vbios_boot_state.pcie_gen_bootup_value; ps 3098 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[0].pcie_lane = data->vbios_boot_state.pcie_lane_bootup_value; ps 3220 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct smu7_power_state *ps; ps 3229 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = (struct smu7_power_state *)(&state->hardware); ps 3251 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->dc_compatible = true; ps 3254 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c data->acpi_pcie_gen = ps->performance_levels[0].pcie_gen; ps 3256 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->uvd_clks.vclk = state->uvd_clocks.VCLK; ps 3257 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->uvd_clks.dclk = state->uvd_clocks.DCLK; ps 3265 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c for (i = 0; i < ps->performance_level_count; i++) { ps 3267 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3269 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3272 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3274 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3277 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3279 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3281 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3283 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3289 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c for (i = 0; i < ps->performance_level_count; i++) { ps 3291 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3293 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3296 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3298 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3301 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3303 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3306 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3308 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3323 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct smu7_power_state *ps = cast_phw_smu7_power_state(power_state); ps 3336 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c (ps->performance_level_count < smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_GRAPHICS)), ps 3341 
drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c (ps->performance_level_count < ps 3346 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c performance_level = &(ps->performance_levels ps 3347 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c [ps->performance_level_count++]); ps 3365 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct smu7_power_state *ps; ps 3374 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = (struct smu7_power_state *)(&state->hardware); ps 3399 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->dc_compatible = true; ps 3402 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c data->acpi_pcie_gen = ps->performance_levels[0].pcie_gen; ps 3404 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->uvd_clks.vclk = state->uvd_clocks.VCLK; ps 3405 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->uvd_clks.dclk = state->uvd_clocks.DCLK; ps 3414 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c for (i = 0; i < ps->performance_level_count; i++) { ps 3416 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3418 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3421 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3423 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3426 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3428 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3431 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3433 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3439 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c for (i = 0; i < ps->performance_level_count; i++) { ps 3441 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3443 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3446 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen) ps 3448 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_gen; ps 3451 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3453 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 3456 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane) ps 3458 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_levels[i].pcie_lane; ps 4587 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct pp_power_state *ps; ps 4593 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = hwmgr->request_ps; ps 4595 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c if (ps == NULL) ps 4598 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c smu7_ps = cast_phw_smu7_power_state(&ps->hardware); ps 4629 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c struct pp_power_state *ps; ps 4635 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = hwmgr->request_ps; ps 4637 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c if (ps == NULL) ps 4640 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c smu7_ps = cast_phw_smu7_power_state(&ps->hardware); ps 5076 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c const struct smu7_power_state *ps; ps 5084 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps = cast_const_phw_smu7_power_state(state); ps 5086 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c i = index > 
ps->performance_level_count - 1 ? ps 5087 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c ps->performance_level_count - 1 : index; ps 5089 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c level->coreClock = ps->performance_levels[i].engine_clock; ps 5090 drivers/gpu/drm/amd/powerplay/hwmgr/smu7_hwmgr.c level->memory_clock = ps->performance_levels[i].memory_clock; ps 1300 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c struct pp_power_state *ps; ps 1306 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c ps = hwmgr->request_ps; ps 1308 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c if (ps == NULL) ps 1311 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c smu8_ps = cast_smu8_power_state(&ps->hardware); ps 1374 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c unsigned long entry, struct pp_power_state *ps) ps 1379 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c ps->hardware.magic = smu8_magic; ps 1381 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c smu8_ps = cast_smu8_power_state(&(ps->hardware)); ps 1383 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c result = pp_tables_get_entry(hwmgr, entry, ps, ps 1386 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c smu8_ps->uvd_clocks.vclk = ps->uvd_clocks.VCLK; ps 1387 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c smu8_ps->uvd_clocks.dclk = ps->uvd_clocks.DCLK; ps 1555 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c const struct smu8_power_state *ps; ps 1564 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c ps = cast_const_smu8_power_state(state); ps 1566 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c level_index = index > ps->level - 1 ? ps->level - 1 : index; ps 1567 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c level->coreClock = ps->levels[level_index].engineClock; ps 1570 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c for (i = 1; i < ps->level; i++) { ps 1571 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c if (ps->levels[i].engineClock > data->dce_slow_sclk_threshold) { ps 1572 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c level->coreClock = ps->levels[i].engineClock; ps 1583 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c level->vddc = (smu8_convert_8Bit_index_to_voltage(hwmgr, ps->levels[level_index].vddcIndex) + 2) / 4; ps 1593 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c const struct smu8_power_state *ps = cast_const_smu8_power_state(state); ps 1595 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c clock_info->min_eng_clk = ps->levels[0].engineClock / (1 << (ps->levels[0].ssDividerIndex)); ps 1596 drivers/gpu/drm/amd/powerplay/hwmgr/smu8_hwmgr.c clock_info->max_eng_clk = ps->levels[ps->level - 1].engineClock / (1 << (ps->levels[ps->level - 1].ssDividerIndex)); ps 3101 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c struct vega10_power_state *ps; ps 3105 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = cast_phw_vega10_power_state(&state->hardware); ps 3116 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps->dc_compatible = true; ps 3118 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps->uvd_clks.vclk = state->uvd_clocks.VCLK; ps 3119 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps->uvd_clks.dclk = state->uvd_clocks.DCLK; ps 3707 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c struct pp_power_state *ps; ps 3713 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = hwmgr->request_ps; ps 3715 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c if (ps == NULL) ps 3718 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c vega10_ps = cast_phw_vega10_power_state(&ps->hardware); ps 3729 
drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c struct pp_power_state *ps; ps 3735 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = hwmgr->request_ps; ps 3737 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c if (ps == NULL) ps 3740 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c vega10_ps = cast_phw_vega10_power_state(&ps->hardware); ps 4769 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c struct pp_power_state *ps; ps 4772 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = hwmgr->request_ps; ps 4774 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c if (ps == NULL) ps 4777 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c vega10_ps = cast_phw_vega10_power_state(&ps->hardware); ps 4820 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c struct pp_power_state *ps; ps 4823 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = hwmgr->request_ps; ps 4825 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c if (ps == NULL) ps 4828 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c vega10_ps = cast_phw_vega10_power_state(&ps->hardware); ps 5030 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c struct pp_power_state *ps = hwmgr->request_ps; ps 5040 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c if (!ps) ps 5043 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c vega10_ps = cast_phw_vega10_power_state(&ps->hardware); ps 5061 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c if (!hwmgr->ps) ps 5064 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = (struct pp_power_state *)((unsigned long)(hwmgr->ps) + hwmgr->ps_size * (hwmgr->num_ps - 1)); ps 5065 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c vega10_ps = cast_phw_vega10_power_state(&ps->hardware); ps 5249 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c const struct vega10_power_state *ps; ps 5257 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps = cast_const_phw_vega10_power_state(state); ps 5259 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c i = index > ps->performance_level_count - 1 ? ps 5260 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c ps->performance_level_count - 1 : index; ps 5262 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c level->coreClock = ps->performance_levels[i].gfx_clock; ps 5263 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_hwmgr.c level->memory_clock = ps->performance_levels[i].mem_clock; ps 770 drivers/gpu/drm/amd/powerplay/inc/hwmgr.h struct pp_power_state *ps; ps 3131 drivers/gpu/drm/i915/intel_pm.c const struct intel_plane_state *ps = to_intel_plane_state(plane_state); ps 3134 drivers/gpu/drm/i915/intel_pm.c pristate = ps; ps 3136 drivers/gpu/drm/i915/intel_pm.c sprstate = ps; ps 3138 drivers/gpu/drm/i915/intel_pm.c curstate = ps; ps 1043 drivers/gpu/drm/nouveau/dispnv04/crtc.c struct nv04_page_flip_state *ps) ps 1068 drivers/gpu/drm/nouveau/dispnv04/crtc.c if (ps) ps 1069 drivers/gpu/drm/nouveau/dispnv04/crtc.c *ps = *s; ps 2083 drivers/gpu/drm/omapdrm/dss/dispc.c static s32 pixinc(int pixels, u8 ps) ps 2088 drivers/gpu/drm/omapdrm/dss/dispc.c return 1 + (pixels - 1) * ps; ps 2090 drivers/gpu/drm/omapdrm/dss/dispc.c return 1 - (-pixels + 1) * ps; ps 2102 drivers/gpu/drm/omapdrm/dss/dispc.c u8 ps; ps 2104 drivers/gpu/drm/omapdrm/dss/dispc.c ps = color_mode_to_bpp(fourcc) / 8; ps 2126 drivers/gpu/drm/omapdrm/dss/dispc.c *offset0 = field_offset * screen_width * ps; ps 2130 drivers/gpu/drm/omapdrm/dss/dispc.c (fieldmode ? 
screen_width : 0), ps); ps 2132 drivers/gpu/drm/omapdrm/dss/dispc.c *pix_inc = pixinc(x_predecim, 2 * ps); ps 2134 drivers/gpu/drm/omapdrm/dss/dispc.c *pix_inc = pixinc(x_predecim, ps); ps 61 drivers/gpu/drm/radeon/atom.c uint32_t *ps, *ws; ps 227 drivers/gpu/drm/radeon/atom.c val = get_unaligned_le32((u32 *)&ctx->ps[idx]); ps 500 drivers/gpu/drm/radeon/atom.c ctx->ps[idx] = cpu_to_le32(val); ps 629 drivers/gpu/drm/radeon/atom.c r = atom_execute_table_locked(ctx->ctx, idx, ctx->ps + ctx->ps_shift); ps 1161 drivers/gpu/drm/radeon/atom.c int len, ws, ps, ptr; ps 1171 drivers/gpu/drm/radeon/atom.c ps = CU8(base + ATOM_CT_PS_PTR) & ATOM_CT_PS_MASK; ps 1174 drivers/gpu/drm/radeon/atom.c SDEBUG(">> execute %04X (len %d, WS %d, PS %d)\n", base, len, ws, ps); ps 1177 drivers/gpu/drm/radeon/atom.c ectx.ps_shift = ps / 4; ps 1179 drivers/gpu/drm/radeon/atom.c ectx.ps = params; ps 1196 drivers/gpu/drm/radeon/atom.c base, len, ws, ps, ptr - 1); ps 1330 drivers/gpu/drm/radeon/atom.c uint32_t ps[16]; ps 1333 drivers/gpu/drm/radeon/atom.c memset(ps, 0, 64); ps 1335 drivers/gpu/drm/radeon/atom.c ps[0] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFSCLK_PTR)); ps 1336 drivers/gpu/drm/radeon/atom.c ps[1] = cpu_to_le32(CU32(hwi + ATOM_FWI_DEFMCLK_PTR)); ps 1337 drivers/gpu/drm/radeon/atom.c if (!ps[0] || !ps[1]) ps 1342 drivers/gpu/drm/radeon/atom.c ret = atom_execute_table(ctx, ATOM_CMD_INIT, ps); ps 1346 drivers/gpu/drm/radeon/atom.c memset(ps, 0, 64); ps 1350 drivers/gpu/drm/radeon/atom.c atom_execute_table(ctx, ATOM_CMD_SPDFANCNTL, ps); ps 2099 drivers/gpu/drm/radeon/btc_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2117 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.mclk > max_limits->mclk) ps 2118 drivers/gpu/drm/radeon/btc_dpm.c ps->high.mclk = max_limits->mclk; ps 2119 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.sclk > max_limits->sclk) ps 2120 drivers/gpu/drm/radeon/btc_dpm.c ps->high.sclk = max_limits->sclk; ps 2121 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.vddc > max_limits->vddc) ps 2122 drivers/gpu/drm/radeon/btc_dpm.c ps->high.vddc = max_limits->vddc; ps 2123 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.vddci > max_limits->vddci) ps 2124 drivers/gpu/drm/radeon/btc_dpm.c ps->high.vddci = max_limits->vddci; ps 2126 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.mclk > max_limits->mclk) ps 2127 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.mclk = max_limits->mclk; ps 2128 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.sclk > max_limits->sclk) ps 2129 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.sclk = max_limits->sclk; ps 2130 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.vddc > max_limits->vddc) ps 2131 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.vddc = max_limits->vddc; ps 2132 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.vddci > max_limits->vddci) ps 2133 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.vddci = max_limits->vddci; ps 2135 drivers/gpu/drm/radeon/btc_dpm.c if (ps->low.mclk > max_limits->mclk) ps 2136 drivers/gpu/drm/radeon/btc_dpm.c ps->low.mclk = max_limits->mclk; ps 2137 drivers/gpu/drm/radeon/btc_dpm.c if (ps->low.sclk > max_limits->sclk) ps 2138 drivers/gpu/drm/radeon/btc_dpm.c ps->low.sclk = max_limits->sclk; ps 2139 drivers/gpu/drm/radeon/btc_dpm.c if (ps->low.vddc > max_limits->vddc) ps 2140 drivers/gpu/drm/radeon/btc_dpm.c ps->low.vddc = max_limits->vddc; ps 2141 drivers/gpu/drm/radeon/btc_dpm.c if (ps->low.vddci > max_limits->vddci) ps 2142 drivers/gpu/drm/radeon/btc_dpm.c ps->low.vddci = max_limits->vddci; ps 2148 drivers/gpu/drm/radeon/btc_dpm.c sclk = ps->low.sclk; ps 
2149 drivers/gpu/drm/radeon/btc_dpm.c mclk = ps->high.mclk; ps 2150 drivers/gpu/drm/radeon/btc_dpm.c vddc = ps->low.vddc; ps 2151 drivers/gpu/drm/radeon/btc_dpm.c vddci = ps->high.vddci; ps 2153 drivers/gpu/drm/radeon/btc_dpm.c sclk = ps->low.sclk; ps 2154 drivers/gpu/drm/radeon/btc_dpm.c mclk = ps->low.mclk; ps 2155 drivers/gpu/drm/radeon/btc_dpm.c vddc = ps->low.vddc; ps 2156 drivers/gpu/drm/radeon/btc_dpm.c vddci = ps->low.vddci; ps 2160 drivers/gpu/drm/radeon/btc_dpm.c ps->low.sclk = sclk; ps 2161 drivers/gpu/drm/radeon/btc_dpm.c ps->low.mclk = mclk; ps 2162 drivers/gpu/drm/radeon/btc_dpm.c ps->low.vddc = vddc; ps 2163 drivers/gpu/drm/radeon/btc_dpm.c ps->low.vddci = vddci; ps 2166 drivers/gpu/drm/radeon/btc_dpm.c &ps->low.sclk, &ps->low.mclk); ps 2169 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.sclk < ps->low.sclk) ps 2170 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.sclk = ps->low.sclk; ps 2171 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.vddc < ps->low.vddc) ps 2172 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.vddc = ps->low.vddc; ps 2173 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.sclk < ps->medium.sclk) ps 2174 drivers/gpu/drm/radeon/btc_dpm.c ps->high.sclk = ps->medium.sclk; ps 2175 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.vddc < ps->medium.vddc) ps 2176 drivers/gpu/drm/radeon/btc_dpm.c ps->high.vddc = ps->medium.vddc; ps 2179 drivers/gpu/drm/radeon/btc_dpm.c mclk = ps->low.mclk; ps 2180 drivers/gpu/drm/radeon/btc_dpm.c if (mclk < ps->medium.mclk) ps 2181 drivers/gpu/drm/radeon/btc_dpm.c mclk = ps->medium.mclk; ps 2182 drivers/gpu/drm/radeon/btc_dpm.c if (mclk < ps->high.mclk) ps 2183 drivers/gpu/drm/radeon/btc_dpm.c mclk = ps->high.mclk; ps 2184 drivers/gpu/drm/radeon/btc_dpm.c ps->low.mclk = mclk; ps 2185 drivers/gpu/drm/radeon/btc_dpm.c ps->low.vddci = vddci; ps 2186 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.mclk = mclk; ps 2187 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.vddci = vddci; ps 2188 drivers/gpu/drm/radeon/btc_dpm.c ps->high.mclk = mclk; ps 2189 drivers/gpu/drm/radeon/btc_dpm.c ps->high.vddci = vddci; ps 2191 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.mclk < ps->low.mclk) ps 2192 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.mclk = ps->low.mclk; ps 2193 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.vddci < ps->low.vddci) ps 2194 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.vddci = ps->low.vddci; ps 2195 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.mclk < ps->medium.mclk) ps 2196 drivers/gpu/drm/radeon/btc_dpm.c ps->high.mclk = ps->medium.mclk; ps 2197 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.vddci < ps->medium.vddci) ps 2198 drivers/gpu/drm/radeon/btc_dpm.c ps->high.vddci = ps->medium.vddci; ps 2202 drivers/gpu/drm/radeon/btc_dpm.c &ps->medium.sclk, &ps->medium.mclk); ps 2204 drivers/gpu/drm/radeon/btc_dpm.c &ps->high.sclk, &ps->high.mclk); ps 2206 drivers/gpu/drm/radeon/btc_dpm.c btc_adjust_clock_combinations(rdev, max_limits, &ps->low); ps 2207 drivers/gpu/drm/radeon/btc_dpm.c btc_adjust_clock_combinations(rdev, max_limits, &ps->medium); ps 2208 drivers/gpu/drm/radeon/btc_dpm.c btc_adjust_clock_combinations(rdev, max_limits, &ps->high); ps 2211 drivers/gpu/drm/radeon/btc_dpm.c ps->low.sclk, max_limits->vddc, &ps->low.vddc); ps 2213 drivers/gpu/drm/radeon/btc_dpm.c ps->low.mclk, max_limits->vddci, &ps->low.vddci); ps 2215 drivers/gpu/drm/radeon/btc_dpm.c ps->low.mclk, max_limits->vddc, &ps->low.vddc); ps 2217 drivers/gpu/drm/radeon/btc_dpm.c rdev->clock.current_dispclk, max_limits->vddc, &ps->low.vddc); ps 2220 
drivers/gpu/drm/radeon/btc_dpm.c ps->medium.sclk, max_limits->vddc, &ps->medium.vddc); ps 2222 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.mclk, max_limits->vddci, &ps->medium.vddci); ps 2224 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.mclk, max_limits->vddc, &ps->medium.vddc); ps 2226 drivers/gpu/drm/radeon/btc_dpm.c rdev->clock.current_dispclk, max_limits->vddc, &ps->medium.vddc); ps 2229 drivers/gpu/drm/radeon/btc_dpm.c ps->high.sclk, max_limits->vddc, &ps->high.vddc); ps 2231 drivers/gpu/drm/radeon/btc_dpm.c ps->high.mclk, max_limits->vddci, &ps->high.vddci); ps 2233 drivers/gpu/drm/radeon/btc_dpm.c ps->high.mclk, max_limits->vddc, &ps->high.vddc); ps 2235 drivers/gpu/drm/radeon/btc_dpm.c rdev->clock.current_dispclk, max_limits->vddc, &ps->high.vddc); ps 2238 drivers/gpu/drm/radeon/btc_dpm.c &ps->low.vddc, &ps->low.vddci); ps 2240 drivers/gpu/drm/radeon/btc_dpm.c &ps->medium.vddc, &ps->medium.vddci); ps 2242 drivers/gpu/drm/radeon/btc_dpm.c &ps->high.vddc, &ps->high.vddci); ps 2244 drivers/gpu/drm/radeon/btc_dpm.c if ((ps->high.vddc <= rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) && ps 2245 drivers/gpu/drm/radeon/btc_dpm.c (ps->medium.vddc <= rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) && ps 2246 drivers/gpu/drm/radeon/btc_dpm.c (ps->low.vddc <= rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc)) ps 2247 drivers/gpu/drm/radeon/btc_dpm.c ps->dc_compatible = true; ps 2249 drivers/gpu/drm/radeon/btc_dpm.c ps->dc_compatible = false; ps 2251 drivers/gpu/drm/radeon/btc_dpm.c if (ps->low.vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) ps 2252 drivers/gpu/drm/radeon/btc_dpm.c ps->low.flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2; ps 2253 drivers/gpu/drm/radeon/btc_dpm.c if (ps->medium.vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) ps 2254 drivers/gpu/drm/radeon/btc_dpm.c ps->medium.flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2; ps 2255 drivers/gpu/drm/radeon/btc_dpm.c if (ps->high.vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) ps 2256 drivers/gpu/drm/radeon/btc_dpm.c ps->high.flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2; ps 2728 drivers/gpu/drm/radeon/btc_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 2730 drivers/gpu/drm/radeon/btc_dpm.c kfree(rdev->pm.dpm.ps); ps 2741 drivers/gpu/drm/radeon/btc_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2751 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->low; ps 2753 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->medium; ps 2755 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->high; ps 2766 drivers/gpu/drm/radeon/btc_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2776 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->low; ps 2778 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->medium; ps 2780 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->high; ps 2789 drivers/gpu/drm/radeon/btc_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2799 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->low; ps 2801 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->medium; ps 2803 drivers/gpu/drm/radeon/btc_dpm.c pl = &ps->high; ps 205 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *ps = rps->ps_priv; ps 207 drivers/gpu/drm/radeon/ci_dpm.c return ps; ps 798 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *ps = ci_get_ps(rps); ps 830 drivers/gpu/drm/radeon/ci_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 831 drivers/gpu/drm/radeon/ci_dpm.c if (ps->performance_levels[i].mclk > max_limits->mclk) ps 832 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[i].mclk = max_limits->mclk; ps 833 drivers/gpu/drm/radeon/ci_dpm.c if (ps->performance_levels[i].sclk > max_limits->sclk) ps 834 
drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[i].sclk = max_limits->sclk; ps 841 drivers/gpu/drm/radeon/ci_dpm.c mclk = ps->performance_levels[ps->performance_level_count - 1].mclk; ps 842 drivers/gpu/drm/radeon/ci_dpm.c sclk = ps->performance_levels[0].sclk; ps 844 drivers/gpu/drm/radeon/ci_dpm.c mclk = ps->performance_levels[0].mclk; ps 845 drivers/gpu/drm/radeon/ci_dpm.c sclk = ps->performance_levels[0].sclk; ps 855 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[0].sclk = sclk; ps 856 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[0].mclk = mclk; ps 858 drivers/gpu/drm/radeon/ci_dpm.c if (ps->performance_levels[1].sclk < ps->performance_levels[0].sclk) ps 859 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[1].sclk = ps->performance_levels[0].sclk; ps 862 drivers/gpu/drm/radeon/ci_dpm.c if (ps->performance_levels[0].mclk < ps->performance_levels[1].mclk) ps 863 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[0].mclk = ps->performance_levels[1].mclk; ps 865 drivers/gpu/drm/radeon/ci_dpm.c if (ps->performance_levels[1].mclk < ps->performance_levels[0].mclk) ps 866 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_levels[1].mclk = ps->performance_levels[0].mclk; ps 5479 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *ps = ci_get_ps(rps); ps 5480 drivers/gpu/drm/radeon/ci_dpm.c struct ci_pl *pl = &ps->performance_levels[index]; ps 5482 drivers/gpu/drm/radeon/ci_dpm.c ps->performance_level_count = index + 1; ps 5558 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *ps; ps 5575 drivers/gpu/drm/radeon/ci_dpm.c rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 5578 drivers/gpu/drm/radeon/ci_dpm.c if (!rdev->pm.dpm.ps) ps 5589 drivers/gpu/drm/radeon/ci_dpm.c ps = kzalloc(sizeof(struct ci_ps), GFP_KERNEL); ps 5590 drivers/gpu/drm/radeon/ci_dpm.c if (ps == NULL) { ps 5591 drivers/gpu/drm/radeon/ci_dpm.c kfree(rdev->pm.dpm.ps); ps 5594 drivers/gpu/drm/radeon/ci_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 5595 drivers/gpu/drm/radeon/ci_dpm.c ci_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 5610 drivers/gpu/drm/radeon/ci_dpm.c &rdev->pm.dpm.ps[i], k, ps 5667 drivers/gpu/drm/radeon/ci_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 5669 drivers/gpu/drm/radeon/ci_dpm.c kfree(rdev->pm.dpm.ps); ps 5959 drivers/gpu/drm/radeon/ci_dpm.c struct ci_ps *ps = ci_get_ps(rps); ps 5966 drivers/gpu/drm/radeon/ci_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 5967 drivers/gpu/drm/radeon/ci_dpm.c pl = &ps->performance_levels[i]; ps 2148 drivers/gpu/drm/radeon/cypress_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 2150 drivers/gpu/drm/radeon/cypress_dpm.c kfree(rdev->pm.dpm.ps); ps 1628 drivers/gpu/drm/radeon/evergreen.c struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; ps 1629 drivers/gpu/drm/radeon/evergreen.c struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage; ps 246 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps = rps->ps_priv; ps 248 drivers/gpu/drm/radeon/kv_dpm.c return ps; ps 2143 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps = kv_get_ps(new_rps); ps 2187 drivers/gpu/drm/radeon/kv_dpm.c ps->need_dfs_bypass = true; ps 2189 drivers/gpu/drm/radeon/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2190 drivers/gpu/drm/radeon/kv_dpm.c if (ps->levels[i].sclk < sclk) ps 2191 drivers/gpu/drm/radeon/kv_dpm.c ps->levels[i].sclk = sclk; ps 2195 drivers/gpu/drm/radeon/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2198 drivers/gpu/drm/radeon/kv_dpm.c kv_convert_8bit_index_to_voltage(rdev, ps->levels[i].vddc_index))) { ps 2200 
drivers/gpu/drm/radeon/kv_dpm.c ps->levels[i].sclk = table->entries[limit].clk; ps 2207 drivers/gpu/drm/radeon/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2210 drivers/gpu/drm/radeon/kv_dpm.c kv_convert_8bit_index_to_voltage(rdev, ps->levels[i].vddc_index))) { ps 2212 drivers/gpu/drm/radeon/kv_dpm.c ps->levels[i].sclk = table->entries[limit].sclk_frequency; ps 2218 drivers/gpu/drm/radeon/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2219 drivers/gpu/drm/radeon/kv_dpm.c ps->levels[i].sclk = stable_p_state_sclk; ps 2233 drivers/gpu/drm/radeon/kv_dpm.c ps->dpm0_pg_nb_ps_lo = 0x1; ps 2234 drivers/gpu/drm/radeon/kv_dpm.c ps->dpm0_pg_nb_ps_hi = 0x0; ps 2235 drivers/gpu/drm/radeon/kv_dpm.c ps->dpmx_nb_ps_lo = 0x1; ps 2236 drivers/gpu/drm/radeon/kv_dpm.c ps->dpmx_nb_ps_hi = 0x0; ps 2238 drivers/gpu/drm/radeon/kv_dpm.c ps->dpm0_pg_nb_ps_lo = 0x3; ps 2239 drivers/gpu/drm/radeon/kv_dpm.c ps->dpm0_pg_nb_ps_hi = 0x0; ps 2240 drivers/gpu/drm/radeon/kv_dpm.c ps->dpmx_nb_ps_lo = 0x3; ps 2241 drivers/gpu/drm/radeon/kv_dpm.c ps->dpmx_nb_ps_hi = 0x0; ps 2247 drivers/gpu/drm/radeon/kv_dpm.c ps->dpm0_pg_nb_ps_lo = force_high ? 0x2 : 0x3; ps 2248 drivers/gpu/drm/radeon/kv_dpm.c ps->dpm0_pg_nb_ps_hi = 0x2; ps 2249 drivers/gpu/drm/radeon/kv_dpm.c ps->dpmx_nb_ps_lo = force_high ? 0x2 : 0x3; ps 2250 drivers/gpu/drm/radeon/kv_dpm.c ps->dpmx_nb_ps_hi = 0x2; ps 2576 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps) ps 2580 drivers/gpu/drm/radeon/kv_dpm.c ps->num_levels = 1; ps 2581 drivers/gpu/drm/radeon/kv_dpm.c ps->levels[0] = pi->boot_pl; ps 2589 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps = kv_get_ps(rps); ps 2605 drivers/gpu/drm/radeon/kv_dpm.c kv_patch_boot_state(rdev, ps); ps 2616 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps = kv_get_ps(rps); ps 2617 drivers/gpu/drm/radeon/kv_dpm.c struct kv_pl *pl = &ps->levels[index]; ps 2625 drivers/gpu/drm/radeon/kv_dpm.c ps->num_levels = index + 1; ps 2648 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps; ps 2665 drivers/gpu/drm/radeon/kv_dpm.c rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 2668 drivers/gpu/drm/radeon/kv_dpm.c if (!rdev->pm.dpm.ps) ps 2679 drivers/gpu/drm/radeon/kv_dpm.c ps = kzalloc(sizeof(struct kv_ps), GFP_KERNEL); ps 2680 drivers/gpu/drm/radeon/kv_dpm.c if (ps == NULL) { ps 2681 drivers/gpu/drm/radeon/kv_dpm.c kfree(rdev->pm.dpm.ps); ps 2684 drivers/gpu/drm/radeon/kv_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 2697 drivers/gpu/drm/radeon/kv_dpm.c &rdev->pm.dpm.ps[i], k, ps 2701 drivers/gpu/drm/radeon/kv_dpm.c kv_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 2853 drivers/gpu/drm/radeon/kv_dpm.c struct kv_ps *ps = kv_get_ps(rps); ps 2858 drivers/gpu/drm/radeon/kv_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2859 drivers/gpu/drm/radeon/kv_dpm.c struct kv_pl *pl = &ps->levels[i]; ps 2872 drivers/gpu/drm/radeon/kv_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 2874 drivers/gpu/drm/radeon/kv_dpm.c kfree(rdev->pm.dpm.ps); ps 737 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = rps->ps_priv; ps 739 drivers/gpu/drm/radeon/ni_dpm.c return ps; ps 790 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 809 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 810 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].mclk > max_limits->mclk) ps 811 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].mclk = max_limits->mclk; ps 812 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].sclk > max_limits->sclk) ps 813 drivers/gpu/drm/radeon/ni_dpm.c 
ps->performance_levels[i].sclk = max_limits->sclk; ps 814 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].vddc > max_limits->vddc) ps 815 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].vddc = max_limits->vddc; ps 816 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].vddci > max_limits->vddci) ps 817 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].vddci = max_limits->vddci; ps 825 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[0].mclk = ps 826 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[ps->performance_level_count - 1].mclk; ps 827 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[0].vddci = ps 828 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[ps->performance_level_count - 1].vddci; ps 832 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[0].sclk, ps 833 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[0].mclk); ps 835 drivers/gpu/drm/radeon/ni_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 836 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk) ps 837 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk; ps 838 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc) ps 839 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc; ps 844 drivers/gpu/drm/radeon/ni_dpm.c mclk = ps->performance_levels[0].mclk; ps 845 drivers/gpu/drm/radeon/ni_dpm.c vddci = ps->performance_levels[0].vddci; ps 846 drivers/gpu/drm/radeon/ni_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 847 drivers/gpu/drm/radeon/ni_dpm.c if (mclk < ps->performance_levels[i].mclk) ps 848 drivers/gpu/drm/radeon/ni_dpm.c mclk = ps->performance_levels[i].mclk; ps 849 drivers/gpu/drm/radeon/ni_dpm.c if (vddci < ps->performance_levels[i].vddci) ps 850 drivers/gpu/drm/radeon/ni_dpm.c vddci = ps->performance_levels[i].vddci; ps 852 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 853 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].mclk = mclk; ps 854 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].vddci = vddci; ps 857 drivers/gpu/drm/radeon/ni_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 858 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk) ps 859 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk; ps 860 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci) ps 861 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci; ps 865 drivers/gpu/drm/radeon/ni_dpm.c for (i = 1; i < ps->performance_level_count; i++) ps 867 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[i].sclk, ps 868 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[i].mclk); ps 870 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) ps 872 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[i]); ps 874 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 876 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].sclk, ps 877 drivers/gpu/drm/radeon/ni_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 879 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].mclk, ps 880 
drivers/gpu/drm/radeon/ni_dpm.c max_limits->vddci, &ps->performance_levels[i].vddci); ps 882 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].mclk, ps 883 drivers/gpu/drm/radeon/ni_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 886 drivers/gpu/drm/radeon/ni_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 889 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 892 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[i].vddc, ps 893 drivers/gpu/drm/radeon/ni_dpm.c &ps->performance_levels[i].vddci); ps 896 drivers/gpu/drm/radeon/ni_dpm.c ps->dc_compatible = true; ps 897 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 898 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) ps 899 drivers/gpu/drm/radeon/ni_dpm.c ps->dc_compatible = false; ps 901 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[i].vddc < rdev->pm.dpm.dyn_state.min_vddc_for_pcie_gen2) ps 902 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_levels[i].flags &= ~ATOM_PPLIB_R600_FLAGS_PCIEGEN2; ps 909 drivers/gpu/drm/radeon/ni_dpm.c const u32 *ps = NULL; ps 911 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_cgcg_cgls_default; ps 914 drivers/gpu/drm/radeon/ni_dpm.c btc_program_mgcg_hw_sequence(rdev, ps, count); ps 921 drivers/gpu/drm/radeon/ni_dpm.c const u32 *ps = NULL; ps 924 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_cgcg_cgls_enable; ps 927 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_cgcg_cgls_disable; ps 931 drivers/gpu/drm/radeon/ni_dpm.c btc_program_mgcg_hw_sequence(rdev, ps, count); ps 937 drivers/gpu/drm/radeon/ni_dpm.c const u32 *ps = NULL; ps 939 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_mgcg_default; ps 942 drivers/gpu/drm/radeon/ni_dpm.c btc_program_mgcg_hw_sequence(rdev, ps, count); ps 949 drivers/gpu/drm/radeon/ni_dpm.c const u32 *ps = NULL; ps 952 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_mgcg_enable; ps 955 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_mgcg_disable; ps 959 drivers/gpu/drm/radeon/ni_dpm.c btc_program_mgcg_hw_sequence(rdev, ps, count); ps 965 drivers/gpu/drm/radeon/ni_dpm.c const u32 *ps = NULL; ps 967 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_sysls_default; ps 970 drivers/gpu/drm/radeon/ni_dpm.c btc_program_mgcg_hw_sequence(rdev, ps, count); ps 977 drivers/gpu/drm/radeon/ni_dpm.c const u32 *ps = NULL; ps 980 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_sysls_enable; ps 983 drivers/gpu/drm/radeon/ni_dpm.c ps = (const u32 *)&cayman_sysls_disable; ps 987 drivers/gpu/drm/radeon/ni_dpm.c btc_program_mgcg_hw_sequence(rdev, ps, count); ps 2298 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(radeon_state); ps 2302 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count - 1; i++) ps 2305 drivers/gpu/drm/radeon/ni_dpm.c smc_state->levels[ps->performance_level_count - 1].bSP = ps 3926 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 3927 drivers/gpu/drm/radeon/ni_dpm.c struct rv7xx_pl *pl = &ps->performance_levels[index]; ps 3929 drivers/gpu/drm/radeon/ni_dpm.c ps->performance_level_count = index + 1; ps 3949 drivers/gpu/drm/radeon/ni_dpm.c if (ps->performance_levels[0].flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ps 3996 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps; ps 4003 drivers/gpu/drm/radeon/ni_dpm.c rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates, 
ps 4006 drivers/gpu/drm/radeon/ni_dpm.c if (!rdev->pm.dpm.ps) ps 4021 drivers/gpu/drm/radeon/ni_dpm.c ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL); ps 4022 drivers/gpu/drm/radeon/ni_dpm.c if (ps == NULL) { ps 4023 drivers/gpu/drm/radeon/ni_dpm.c kfree(rdev->pm.dpm.ps); ps 4026 drivers/gpu/drm/radeon/ni_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 4027 drivers/gpu/drm/radeon/ni_dpm.c ni_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 4037 drivers/gpu/drm/radeon/ni_dpm.c &rdev->pm.dpm.ps[i], j, ps 4274 drivers/gpu/drm/radeon/ni_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 4276 drivers/gpu/drm/radeon/ni_dpm.c kfree(rdev->pm.dpm.ps); ps 4285 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 4292 drivers/gpu/drm/radeon/ni_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 4293 drivers/gpu/drm/radeon/ni_dpm.c pl = &ps->performance_levels[i]; ps 4309 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 4315 drivers/gpu/drm/radeon/ni_dpm.c if (current_index >= ps->performance_level_count) { ps 4318 drivers/gpu/drm/radeon/ni_dpm.c pl = &ps->performance_levels[current_index]; ps 4329 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 4335 drivers/gpu/drm/radeon/ni_dpm.c if (current_index >= ps->performance_level_count) { ps 4338 drivers/gpu/drm/radeon/ni_dpm.c pl = &ps->performance_levels[current_index]; ps 4347 drivers/gpu/drm/radeon/ni_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 4353 drivers/gpu/drm/radeon/ni_dpm.c if (current_index >= ps->performance_level_count) { ps 4356 drivers/gpu/drm/radeon/ni_dpm.c pl = &ps->performance_levels[current_index]; ps 354 drivers/gpu/drm/radeon/r100.c struct radeon_power_state *ps = &rdev->pm.power_state[requested_index]; ps 355 drivers/gpu/drm/radeon/r100.c struct radeon_voltage *voltage = &ps->clock_info[0].voltage; ps 359 drivers/gpu/drm/radeon/r100.c if (ps->misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) { ps 385 drivers/gpu/drm/radeon/r100.c if (ps->misc & ATOM_PM_MISCINFO_ASIC_REDUCED_SPEED_SCLK_EN) { ps 387 drivers/gpu/drm/radeon/r100.c if (ps->misc & ATOM_PM_MISCINFO_DYN_CLK_3D_IDLE) ps 391 drivers/gpu/drm/radeon/r100.c if (ps->misc & ATOM_PM_MISCINFO_DYNAMIC_CLOCK_DIVIDER_BY_2) ps 393 drivers/gpu/drm/radeon/r100.c else if (ps->misc & ATOM_PM_MISCINFO_DYNAMIC_CLOCK_DIVIDER_BY_4) ps 398 drivers/gpu/drm/radeon/r100.c if (ps->misc & ATOM_PM_MISCINFO_ASIC_DYNAMIC_VOLTAGE_EN) { ps 421 drivers/gpu/drm/radeon/r100.c if (ps->misc & ATOM_PM_MISCINFO_DYNAMIC_HDP_BLOCK_EN) ps 434 drivers/gpu/drm/radeon/r100.c (ps->pcie_lanes != ps 437 drivers/gpu/drm/radeon/r100.c ps->pcie_lanes); ps 438 drivers/gpu/drm/radeon/r100.c DRM_DEBUG_DRIVER("Setting: p: %d\n", ps->pcie_lanes); ps 780 drivers/gpu/drm/radeon/r600.c struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; ps 781 drivers/gpu/drm/radeon/r600.c struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage; ps 1535 drivers/gpu/drm/radeon/radeon.h struct radeon_ps *ps; ps 1978 drivers/gpu/drm/radeon/radeon.h void (*print_power_state)(struct radeon_device *rdev, struct radeon_ps *ps); ps 2782 drivers/gpu/drm/radeon/radeon.h #define radeon_dpm_print_power_state(rdev, ps) rdev->asic->dpm.print_power_state((rdev), (ps)) ps 433 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 451 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 492 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 592 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 669 drivers/gpu/drm/radeon/radeon_asic.h 
struct radeon_ps *ps); ps 690 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 887 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 917 drivers/gpu/drm/radeon/radeon_asic.h struct radeon_ps *ps); ps 879 drivers/gpu/drm/radeon/radeon_pm.c struct radeon_ps *ps; ps 895 drivers/gpu/drm/radeon/radeon_pm.c ps = &rdev->pm.dpm.ps[i]; ps 896 drivers/gpu/drm/radeon/radeon_pm.c ui_class = ps->class & ATOM_PPLIB_CLASSIFICATION_UI_MASK; ps 901 drivers/gpu/drm/radeon/radeon_pm.c if (ps->caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY) { ps 903 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 905 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 910 drivers/gpu/drm/radeon/radeon_pm.c if (ps->caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY) { ps 912 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 914 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 919 drivers/gpu/drm/radeon/radeon_pm.c if (ps->caps & ATOM_PPLIB_SINGLE_DISPLAY_ONLY) { ps 921 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 923 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 933 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_SDSTATE) ps 934 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 937 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_HDSTATE) ps 938 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 941 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_HD2STATE) ps 942 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 945 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class2 & ATOM_PPLIB_CLASSIFICATION2_MVC) ps 946 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 951 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_THERMAL) ps 952 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 955 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_ACPI) ps 956 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 959 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class2 & ATOM_PPLIB_CLASSIFICATION2_ULV) ps 960 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 963 drivers/gpu/drm/radeon/radeon_pm.c if (ps->class & ATOM_PPLIB_CLASSIFICATION_3DPERFORMANCE) ps 964 drivers/gpu/drm/radeon/radeon_pm.c return ps; ps 1005 drivers/gpu/drm/radeon/radeon_pm.c struct radeon_ps *ps; ps 1022 drivers/gpu/drm/radeon/radeon_pm.c ps = radeon_dpm_pick_power_state(rdev, dpm_state); ps 1023 drivers/gpu/drm/radeon/radeon_pm.c if (ps) ps 1024 drivers/gpu/drm/radeon/radeon_pm.c rdev->pm.dpm.requested_ps = ps; ps 1031 drivers/gpu/drm/radeon/radeon_pm.c if (ps->vce_active != rdev->pm.dpm.vce_active) ps 1084 drivers/gpu/drm/radeon/radeon_pm.c ps->vce_active = rdev->pm.dpm.vce_active; ps 1363 drivers/gpu/drm/radeon/radeon_pm.c radeon_dpm_print_power_state(rdev, &rdev->pm.dpm.ps[i]); ps 225 drivers/gpu/drm/radeon/rs600.c struct radeon_power_state *ps = &rdev->pm.power_state[requested_index]; ps 226 drivers/gpu/drm/radeon/rs600.c struct radeon_voltage *voltage = &ps->clock_info[0].voltage; ps 231 drivers/gpu/drm/radeon/rs600.c if (ps->misc & ATOM_PM_MISCINFO_VOLTAGE_DROP_SUPPORT) { ps 256 drivers/gpu/drm/radeon/rs600.c if (ps->misc & ATOM_PM_MISCINFO_ASIC_REDUCED_SPEED_SCLK_EN) { ps 257 drivers/gpu/drm/radeon/rs600.c if (ps->misc & ATOM_PM_MISCINFO_DYNAMIC_CLOCK_DIVIDER_BY_2) { ps 260 drivers/gpu/drm/radeon/rs600.c } else if (ps->misc & ATOM_PM_MISCINFO_DYNAMIC_CLOCK_DIVIDER_BY_4) { ps 271 drivers/gpu/drm/radeon/rs600.c if (ps->misc & ATOM_PM_MISCINFO_ASIC_DYNAMIC_VOLTAGE_EN) { ps 283 drivers/gpu/drm/radeon/rs600.c if (ps->misc & 
ATOM_PM_MISCINFO_DYNAMIC_HDP_BLOCK_EN) ps 291 drivers/gpu/drm/radeon/rs600.c if (ps->misc & ATOM_PM_MISCINFO_DYNAMIC_MC_HOST_BLOCK_EN) ps 298 drivers/gpu/drm/radeon/rs600.c if (ps->misc & ATOM_PM_MISCINFO2_DYNAMIC_BACK_BIAS_EN) ps 308 drivers/gpu/drm/radeon/rs600.c (ps->pcie_lanes != ps 311 drivers/gpu/drm/radeon/rs600.c ps->pcie_lanes); ps 312 drivers/gpu/drm/radeon/rs600.c DRM_DEBUG("Setting: p: %d\n", ps->pcie_lanes); ps 38 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *ps = rps->ps_priv; ps 40 drivers/gpu/drm/radeon/rs780_dpm.c return ps; ps 753 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *ps = rs780_get_ps(rps); ps 758 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_low = sclk; ps 761 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_high = sclk; ps 765 drivers/gpu/drm/radeon/rs780_dpm.c ps->min_voltage = RS780_VDDC_LEVEL_UNKNOWN; ps 766 drivers/gpu/drm/radeon/rs780_dpm.c ps->max_voltage = RS780_VDDC_LEVEL_UNKNOWN; ps 769 drivers/gpu/drm/radeon/rs780_dpm.c ps->min_voltage = RS780_VDDC_LEVEL_LOW; ps 770 drivers/gpu/drm/radeon/rs780_dpm.c ps->max_voltage = RS780_VDDC_LEVEL_LOW; ps 773 drivers/gpu/drm/radeon/rs780_dpm.c ps->min_voltage = RS780_VDDC_LEVEL_HIGH; ps 774 drivers/gpu/drm/radeon/rs780_dpm.c ps->max_voltage = RS780_VDDC_LEVEL_HIGH; ps 777 drivers/gpu/drm/radeon/rs780_dpm.c ps->min_voltage = RS780_VDDC_LEVEL_LOW; ps 778 drivers/gpu/drm/radeon/rs780_dpm.c ps->max_voltage = RS780_VDDC_LEVEL_HIGH; ps 781 drivers/gpu/drm/radeon/rs780_dpm.c ps->flags = le32_to_cpu(clock_info->rs780.ulFlags); ps 784 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_low = rdev->clock.default_sclk; ps 785 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_high = rdev->clock.default_sclk; ps 786 drivers/gpu/drm/radeon/rs780_dpm.c ps->min_voltage = RS780_VDDC_LEVEL_HIGH; ps 787 drivers/gpu/drm/radeon/rs780_dpm.c ps->max_voltage = RS780_VDDC_LEVEL_HIGH; ps 802 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *ps; ps 809 drivers/gpu/drm/radeon/rs780_dpm.c rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates, ps 812 drivers/gpu/drm/radeon/rs780_dpm.c if (!rdev->pm.dpm.ps) ps 831 drivers/gpu/drm/radeon/rs780_dpm.c ps = kzalloc(sizeof(struct igp_ps), GFP_KERNEL); ps 832 drivers/gpu/drm/radeon/rs780_dpm.c if (ps == NULL) { ps 833 drivers/gpu/drm/radeon/rs780_dpm.c kfree(rdev->pm.dpm.ps); ps 836 drivers/gpu/drm/radeon/rs780_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 837 drivers/gpu/drm/radeon/rs780_dpm.c rs780_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 841 drivers/gpu/drm/radeon/rs780_dpm.c &rdev->pm.dpm.ps[i], ps 943 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *ps = rs780_get_ps(rps); ps 949 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_low, ps->min_voltage); ps 951 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_high, ps->max_voltage); ps 960 drivers/gpu/drm/radeon/rs780_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 962 drivers/gpu/drm/radeon/rs780_dpm.c kfree(rdev->pm.dpm.ps); ps 987 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *ps = rs780_get_ps(rps); ps 999 drivers/gpu/drm/radeon/rs780_dpm.c if (sclk < (ps->sclk_low + 500)) ps 1001 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_low, ps->min_voltage); ps 1004 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_high, ps->max_voltage); ps 1034 drivers/gpu/drm/radeon/rs780_dpm.c struct igp_ps *ps = rs780_get_ps(rps); ps 1046 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_high, false, ÷rs); ps 1053 drivers/gpu/drm/radeon/rs780_dpm.c ps->sclk_low, false, ÷rs); ps 1065 drivers/gpu/drm/radeon/rs780_dpm.c if (ps->sclk_high != ps->sclk_low) { ps 38 
drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps = rps->ps_priv; ps 40 drivers/gpu/drm/radeon/rv6xx_dpm.c return ps; ps 1820 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps = rv6xx_get_ps(rps); ps 1827 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->low; ps 1830 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->medium; ps 1834 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->high; ps 1883 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps; ps 1890 drivers/gpu/drm/radeon/rv6xx_dpm.c rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates, ps 1893 drivers/gpu/drm/radeon/rv6xx_dpm.c if (!rdev->pm.dpm.ps) ps 1908 drivers/gpu/drm/radeon/rv6xx_dpm.c ps = kzalloc(sizeof(struct rv6xx_ps), GFP_KERNEL); ps 1909 drivers/gpu/drm/radeon/rv6xx_dpm.c if (ps == NULL) { ps 1910 drivers/gpu/drm/radeon/rv6xx_dpm.c kfree(rdev->pm.dpm.ps); ps 1913 drivers/gpu/drm/radeon/rv6xx_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 1914 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 1923 drivers/gpu/drm/radeon/rv6xx_dpm.c &rdev->pm.dpm.ps[i], j, ps 2010 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps = rv6xx_get_ps(rps); ps 2016 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->low; ps 2019 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->medium; ps 2022 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->high; ps 2032 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps = rv6xx_get_ps(rps); ps 2042 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->low; ps 2044 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->medium; ps 2046 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->high; ps 2057 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps = rv6xx_get_ps(rps); ps 2067 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->low; ps 2069 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->medium; ps 2071 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->high; ps 2080 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_ps *ps = rv6xx_get_ps(rps); ps 2090 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->low; ps 2092 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->medium; ps 2094 drivers/gpu/drm/radeon/rv6xx_dpm.c pl = &ps->high; ps 2104 drivers/gpu/drm/radeon/rv6xx_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 2106 drivers/gpu/drm/radeon/rv6xx_dpm.c kfree(rdev->pm.dpm.ps); ps 879 drivers/gpu/drm/radeon/rv770.c struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx]; ps 880 drivers/gpu/drm/radeon/rv770.c struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage; ps 49 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps = rps->ps_priv; ps 51 drivers/gpu/drm/radeon/rv770_dpm.c return ps; ps 2179 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2185 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->low; ps 2188 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->medium; ps 2192 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->high; ps 2228 drivers/gpu/drm/radeon/rv770_dpm.c if (ps->low.flags & ATOM_PPLIB_R600_FLAGS_PCIEGEN2) ps 2277 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps; ps 2284 drivers/gpu/drm/radeon/rv770_dpm.c rdev->pm.dpm.ps = kcalloc(power_info->pplib.ucNumStates, ps 2287 drivers/gpu/drm/radeon/rv770_dpm.c if (!rdev->pm.dpm.ps) ps 2302 drivers/gpu/drm/radeon/rv770_dpm.c ps = kzalloc(sizeof(struct rv7xx_ps), GFP_KERNEL); ps 2303 drivers/gpu/drm/radeon/rv770_dpm.c if (ps == NULL) { ps 2304 drivers/gpu/drm/radeon/rv770_dpm.c kfree(rdev->pm.dpm.ps); ps 2307 drivers/gpu/drm/radeon/rv770_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 2308 drivers/gpu/drm/radeon/rv770_dpm.c 
rv7xx_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 2318 drivers/gpu/drm/radeon/rv770_dpm.c &rdev->pm.dpm.ps[i], j, ps 2435 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2442 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->low; ps 2445 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->medium; ps 2448 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->high; ps 2452 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->low; ps 2455 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->medium; ps 2458 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->high; ps 2469 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2479 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->low; ps 2481 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->medium; ps 2483 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->high; ps 2498 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2508 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->low; ps 2510 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->medium; ps 2512 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->high; ps 2520 drivers/gpu/drm/radeon/rv770_dpm.c struct rv7xx_ps *ps = rv770_get_ps(rps); ps 2530 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->low; ps 2532 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->medium; ps 2534 drivers/gpu/drm/radeon/rv770_dpm.c pl = &ps->high; ps 2544 drivers/gpu/drm/radeon/rv770_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 2546 drivers/gpu/drm/radeon/rv770_dpm.c kfree(rdev->pm.dpm.ps); ps 2971 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 3031 drivers/gpu/drm/radeon/si_dpm.c for (i = ps->performance_level_count - 2; i >= 0; i--) { ps 3032 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddc > ps->performance_levels[i+1].vddc) ps 3033 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddc = ps->performance_levels[i+1].vddc; ps 3036 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3037 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].mclk > max_limits->mclk) ps 3038 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk = max_limits->mclk; ps 3039 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].sclk > max_limits->sclk) ps 3040 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].sclk = max_limits->sclk; ps 3041 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddc > max_limits->vddc) ps 3042 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddc = max_limits->vddc; ps 3043 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddci > max_limits->vddci) ps 3044 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddci = max_limits->vddci; ps 3056 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3058 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].sclk > max_sclk_vddc) ps 3059 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].sclk = max_sclk_vddc; ps 3062 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].mclk > max_mclk_vddci) ps 3063 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk = max_mclk_vddci; ps 3066 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].mclk > max_mclk_vddc) ps 3067 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk = max_mclk_vddc; ps 3070 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].mclk > max_mclk) ps 3071 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk = max_mclk; ps 3074 
drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].sclk > max_sclk) ps 3075 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].sclk = max_sclk; ps 3082 drivers/gpu/drm/radeon/si_dpm.c mclk = ps->performance_levels[ps->performance_level_count - 1].mclk; ps 3083 drivers/gpu/drm/radeon/si_dpm.c vddci = ps->performance_levels[ps->performance_level_count - 1].vddci; ps 3085 drivers/gpu/drm/radeon/si_dpm.c mclk = ps->performance_levels[0].mclk; ps 3086 drivers/gpu/drm/radeon/si_dpm.c vddci = ps->performance_levels[0].vddci; ps 3090 drivers/gpu/drm/radeon/si_dpm.c sclk = ps->performance_levels[ps->performance_level_count - 1].sclk; ps 3091 drivers/gpu/drm/radeon/si_dpm.c vddc = ps->performance_levels[ps->performance_level_count - 1].vddc; ps 3093 drivers/gpu/drm/radeon/si_dpm.c sclk = ps->performance_levels[0].sclk; ps 3094 drivers/gpu/drm/radeon/si_dpm.c vddc = ps->performance_levels[0].vddc; ps 3105 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[0].sclk = sclk; ps 3106 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[0].mclk = mclk; ps 3107 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[0].vddc = vddc; ps 3108 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[0].vddci = vddci; ps 3111 drivers/gpu/drm/radeon/si_dpm.c sclk = ps->performance_levels[0].sclk; ps 3112 drivers/gpu/drm/radeon/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3113 drivers/gpu/drm/radeon/si_dpm.c if (sclk < ps->performance_levels[i].sclk) ps 3114 drivers/gpu/drm/radeon/si_dpm.c sclk = ps->performance_levels[i].sclk; ps 3116 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3117 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].sclk = sclk; ps 3118 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddc = vddc; ps 3121 drivers/gpu/drm/radeon/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3122 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk) ps 3123 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk; ps 3124 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddc < ps->performance_levels[i - 1].vddc) ps 3125 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddc = ps->performance_levels[i - 1].vddc; ps 3130 drivers/gpu/drm/radeon/si_dpm.c mclk = ps->performance_levels[0].mclk; ps 3131 drivers/gpu/drm/radeon/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3132 drivers/gpu/drm/radeon/si_dpm.c if (mclk < ps->performance_levels[i].mclk) ps 3133 drivers/gpu/drm/radeon/si_dpm.c mclk = ps->performance_levels[i].mclk; ps 3135 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3136 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk = mclk; ps 3137 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddci = vddci; ps 3140 drivers/gpu/drm/radeon/si_dpm.c for (i = 1; i < ps->performance_level_count; i++) { ps 3141 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].mclk < ps->performance_levels[i - 1].mclk) ps 3142 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk = ps->performance_levels[i - 1].mclk; ps 3143 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddci < ps->performance_levels[i - 1].vddci) ps 3144 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddci = ps->performance_levels[i - 1].vddci; ps 3148 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < 
ps->performance_level_count; i++) ps 3150 drivers/gpu/drm/radeon/si_dpm.c &ps->performance_levels[i]); ps 3152 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3153 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddc < min_vce_voltage) ps 3154 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].vddc = min_vce_voltage; ps 3156 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].sclk, ps 3157 drivers/gpu/drm/radeon/si_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 3159 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk, ps 3160 drivers/gpu/drm/radeon/si_dpm.c max_limits->vddci, &ps->performance_levels[i].vddci); ps 3162 drivers/gpu/drm/radeon/si_dpm.c ps->performance_levels[i].mclk, ps 3163 drivers/gpu/drm/radeon/si_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 3166 drivers/gpu/drm/radeon/si_dpm.c max_limits->vddc, &ps->performance_levels[i].vddc); ps 3169 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3172 drivers/gpu/drm/radeon/si_dpm.c &ps->performance_levels[i].vddc, ps 3173 drivers/gpu/drm/radeon/si_dpm.c &ps->performance_levels[i].vddci); ps 3176 drivers/gpu/drm/radeon/si_dpm.c ps->dc_compatible = true; ps 3177 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count; i++) { ps 3178 drivers/gpu/drm/radeon/si_dpm.c if (ps->performance_levels[i].vddc > rdev->pm.dpm.dyn_state.max_clock_voltage_on_dc.vddc) ps 3179 drivers/gpu/drm/radeon/si_dpm.c ps->dc_compatible = false; ps 3405 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 3406 drivers/gpu/drm/radeon/si_dpm.c u32 levels = ps->performance_level_count; ps 4966 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(radeon_state); ps 4970 drivers/gpu/drm/radeon/si_dpm.c for (i = 0; i < ps->performance_level_count - 1; i++) ps 4973 drivers/gpu/drm/radeon/si_dpm.c smc_state->levels[ps->performance_level_count - 1].bSP = ps 6739 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 6741 drivers/gpu/drm/radeon/si_dpm.c struct rv7xx_pl *pl = &ps->performance_levels[index]; ps 6744 drivers/gpu/drm/radeon/si_dpm.c ps->performance_level_count = index + 1; ps 6823 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps; ps 6840 drivers/gpu/drm/radeon/si_dpm.c rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 6843 drivers/gpu/drm/radeon/si_dpm.c if (!rdev->pm.dpm.ps) ps 6854 drivers/gpu/drm/radeon/si_dpm.c ps = kzalloc(sizeof(struct ni_ps), GFP_KERNEL); ps 6855 drivers/gpu/drm/radeon/si_dpm.c if (ps == NULL) { ps 6856 drivers/gpu/drm/radeon/si_dpm.c kfree(rdev->pm.dpm.ps); ps 6859 drivers/gpu/drm/radeon/si_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 6860 drivers/gpu/drm/radeon/si_dpm.c si_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 6875 drivers/gpu/drm/radeon/si_dpm.c &rdev->pm.dpm.ps[i], k, ps 7084 drivers/gpu/drm/radeon/si_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 7086 drivers/gpu/drm/radeon/si_dpm.c kfree(rdev->pm.dpm.ps); ps 7097 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 7103 drivers/gpu/drm/radeon/si_dpm.c if (current_index >= ps->performance_level_count) { ps 7106 drivers/gpu/drm/radeon/si_dpm.c pl = &ps->performance_levels[current_index]; ps 7117 drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 7123 drivers/gpu/drm/radeon/si_dpm.c if (current_index >= ps->performance_level_count) { ps 7126 drivers/gpu/drm/radeon/si_dpm.c pl = &ps->performance_levels[current_index]; ps 7135 
drivers/gpu/drm/radeon/si_dpm.c struct ni_ps *ps = ni_get_ps(rps); ps 7141 drivers/gpu/drm/radeon/si_dpm.c if (current_index >= ps->performance_level_count) { ps 7144 drivers/gpu/drm/radeon/si_dpm.c pl = &ps->performance_levels[current_index]; ps 76 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = rps->ps_priv; ps 78 drivers/gpu/drm/radeon/sumo_dpm.c return ps; ps 345 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 347 drivers/gpu/drm/radeon/sumo_dpm.c u32 highest_engine_clock = ps->levels[ps->num_levels - 1].sclk; ps 349 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE) ps 354 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < ps->num_levels - 1; i++) ps 359 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE) ps 388 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 408 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 409 drivers/gpu/drm/radeon/sumo_dpm.c asi = (i == ps->num_levels - 1) ? pi->pasi : pi->asi; ps 411 drivers/gpu/drm/radeon/sumo_dpm.c m_a = asi * ps->levels[i].sclk / 100; ps 418 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->flags & SUMO_POWERSTATE_FLAGS_BOOST_STATE) { ps 423 drivers/gpu/drm/radeon/sumo_dpm.c a_t = CG_R(m_a * r[ps->num_levels - 1] / 100) | ps 424 drivers/gpu/drm/radeon/sumo_dpm.c CG_L(m_a * l[ps->num_levels - 1] / 100); ps 1043 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps, ps 1060 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].vddc_index = current_vddc; ps 1062 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[0].sclk > current_sclk) ps 1063 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].sclk = current_sclk; ps 1065 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].ss_divider_index = ps 1066 drivers/gpu/drm/radeon/sumo_dpm.c sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[0].sclk, sclk_in_sr); ps 1068 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].ds_divider_index = ps 1069 drivers/gpu/drm/radeon/sumo_dpm.c sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[0].sclk, SUMO_MINIMUM_ENGINE_CLOCK); ps 1071 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[0].ds_divider_index > ps->levels[0].ss_divider_index + 1) ps 1072 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].ds_divider_index = ps->levels[0].ss_divider_index + 1; ps 1074 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[0].ss_divider_index == ps->levels[0].ds_divider_index) { ps 1075 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[0].ss_divider_index > 1) ps 1076 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].ss_divider_index = ps->levels[0].ss_divider_index - 1; ps 1079 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[0].ss_divider_index == 0) ps 1080 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].ds_divider_index = 0; ps 1082 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[0].ds_divider_index == 0) ps 1083 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0].ss_divider_index = 0; ps 1090 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(new_rps); ps 1099 drivers/gpu/drm/radeon/sumo_dpm.c return sumo_patch_thermal_state(rdev, ps, current_ps); ps 1103 drivers/gpu/drm/radeon/sumo_dpm.c ps->flags |= SUMO_POWERSTATE_FLAGS_BOOST_STATE; ps 1109 drivers/gpu/drm/radeon/sumo_dpm.c ps->flags |= SUMO_POWERSTATE_FLAGS_FORCE_NBPS1_STATE; ps 1111 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 1112 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].vddc_index < min_voltage) ps 1113 
drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].vddc_index = min_voltage; ps 1115 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].sclk < min_sclk) ps 1116 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].sclk = ps 1119 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].ss_divider_index = ps 1120 drivers/gpu/drm/radeon/sumo_dpm.c sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[i].sclk, sclk_in_sr); ps 1122 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].ds_divider_index = ps 1123 drivers/gpu/drm/radeon/sumo_dpm.c sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[i].sclk, SUMO_MINIMUM_ENGINE_CLOCK); ps 1125 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].ds_divider_index > ps->levels[i].ss_divider_index + 1) ps 1126 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].ds_divider_index = ps->levels[i].ss_divider_index + 1; ps 1128 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].ss_divider_index == ps->levels[i].ds_divider_index) { ps 1129 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].ss_divider_index > 1) ps 1130 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].ss_divider_index = ps->levels[i].ss_divider_index - 1; ps 1133 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].ss_divider_index == 0) ps 1134 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].ds_divider_index = 0; ps 1136 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->levels[i].ds_divider_index == 0) ps 1137 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].ss_divider_index = 0; ps 1139 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->flags & SUMO_POWERSTATE_FLAGS_FORCE_NBPS1_STATE) ps 1140 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].allow_gnb_slow = 1; ps 1143 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].allow_gnb_slow = 0; ps 1144 drivers/gpu/drm/radeon/sumo_dpm.c else if (i == ps->num_levels - 1) ps 1145 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].allow_gnb_slow = 0; ps 1147 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[i].allow_gnb_slow = 1; ps 1393 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps) ps 1397 drivers/gpu/drm/radeon/sumo_dpm.c ps->num_levels = 1; ps 1398 drivers/gpu/drm/radeon/sumo_dpm.c ps->flags = 0; ps 1399 drivers/gpu/drm/radeon/sumo_dpm.c ps->levels[0] = pi->boot_pl; ps 1407 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 1423 drivers/gpu/drm/radeon/sumo_dpm.c sumo_patch_boot_state(rdev, ps); ps 1434 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 1435 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_pl *pl = &ps->levels[index]; ps 1444 drivers/gpu/drm/radeon/sumo_dpm.c ps->num_levels = index + 1; ps 1467 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps; ps 1484 drivers/gpu/drm/radeon/sumo_dpm.c rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 1487 drivers/gpu/drm/radeon/sumo_dpm.c if (!rdev->pm.dpm.ps) ps 1498 drivers/gpu/drm/radeon/sumo_dpm.c ps = kzalloc(sizeof(struct sumo_ps), GFP_KERNEL); ps 1499 drivers/gpu/drm/radeon/sumo_dpm.c if (ps == NULL) { ps 1500 drivers/gpu/drm/radeon/sumo_dpm.c kfree(rdev->pm.dpm.ps); ps 1503 drivers/gpu/drm/radeon/sumo_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 1515 drivers/gpu/drm/radeon/sumo_dpm.c &rdev->pm.dpm.ps[i], k, ps 1519 drivers/gpu/drm/radeon/sumo_dpm.c sumo_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 1798 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 1803 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 1804 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_pl *pl = &ps->levels[i]; ps 1817 drivers/gpu/drm/radeon/sumo_dpm.c 
struct sumo_ps *ps = sumo_get_ps(rps); ps 1829 drivers/gpu/drm/radeon/sumo_dpm.c } else if (current_index >= ps->num_levels) { ps 1832 drivers/gpu/drm/radeon/sumo_dpm.c pl = &ps->levels[current_index]; ps 1844 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 1853 drivers/gpu/drm/radeon/sumo_dpm.c } else if (current_index >= ps->num_levels) { ps 1856 drivers/gpu/drm/radeon/sumo_dpm.c pl = &ps->levels[current_index]; ps 1875 drivers/gpu/drm/radeon/sumo_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 1877 drivers/gpu/drm/radeon/sumo_dpm.c kfree(rdev->pm.dpm.ps); ps 1904 drivers/gpu/drm/radeon/sumo_dpm.c struct sumo_ps *ps = sumo_get_ps(rps); ps 1907 drivers/gpu/drm/radeon/sumo_dpm.c if (ps->num_levels <= 1) ps 1913 drivers/gpu/drm/radeon/sumo_dpm.c sumo_power_level_enable(rdev, ps->num_levels - 1, true); ps 1914 drivers/gpu/drm/radeon/sumo_dpm.c sumo_set_forced_level(rdev, ps->num_levels - 1); ps 1916 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < ps->num_levels - 1; i++) { ps 1928 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 1; i < ps->num_levels; i++) { ps 1935 drivers/gpu/drm/radeon/sumo_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 351 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = rps->ps_priv; ps 353 drivers/gpu/drm/radeon/trinity_dpm.c return ps; ps 873 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 874 drivers/gpu/drm/radeon/trinity_dpm.c u32 uvdstates = (ps->vclk_low_divider | ps 875 drivers/gpu/drm/radeon/trinity_dpm.c ps->vclk_high_divider << 8 | ps 876 drivers/gpu/drm/radeon/trinity_dpm.c ps->dclk_low_divider << 16 | ps 877 drivers/gpu/drm/radeon/trinity_dpm.c ps->dclk_high_divider << 24); ps 1207 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 1210 drivers/gpu/drm/radeon/trinity_dpm.c if (ps->num_levels <= 1) ps 1217 drivers/gpu/drm/radeon/trinity_dpm.c ret = trinity_dpm_n_levels_disabled(rdev, ps->num_levels - 1); ps 1221 drivers/gpu/drm/radeon/trinity_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 1325 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps) ps 1329 drivers/gpu/drm/radeon/trinity_dpm.c ps->num_levels = 1; ps 1330 drivers/gpu/drm/radeon/trinity_dpm.c ps->nbps_flags = 0; ps 1331 drivers/gpu/drm/radeon/trinity_dpm.c ps->bapm_flags = 0; ps 1332 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0] = pi->boot_pl; ps 1400 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps, ps 1417 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].vddc_index = current_vddc; ps 1419 drivers/gpu/drm/radeon/trinity_dpm.c if (ps->levels[0].sclk > current_sclk) ps 1420 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].sclk = current_sclk; ps 1422 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].ds_divider_index = ps 1423 drivers/gpu/drm/radeon/trinity_dpm.c trinity_get_sleep_divider_id_from_clock(rdev, ps->levels[0].sclk, sclk_in_sr); ps 1424 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].ss_divider_index = ps->levels[0].ds_divider_index; ps 1425 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].allow_gnb_slow = 1; ps 1426 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].force_nbp_state = 0; ps 1427 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].display_wm = 0; ps 1428 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[0].vce_wm = ps 1429 drivers/gpu/drm/radeon/trinity_dpm.c trinity_calculate_vce_wm(rdev, ps->levels[0].sclk); ps 1433 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps, u32 index) ps 1435 drivers/gpu/drm/radeon/trinity_dpm.c if (ps 
== NULL || ps->num_levels <= 1) ps 1437 drivers/gpu/drm/radeon/trinity_dpm.c else if (ps->num_levels == 2) { ps 1445 drivers/gpu/drm/radeon/trinity_dpm.c else if (ps->levels[index].sclk < 30000) ps 1474 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 1494 drivers/gpu/drm/radeon/trinity_dpm.c ps->vclk_low_divider = ps 1496 drivers/gpu/drm/radeon/trinity_dpm.c ps->dclk_low_divider = ps 1498 drivers/gpu/drm/radeon/trinity_dpm.c ps->vclk_high_divider = ps 1500 drivers/gpu/drm/radeon/trinity_dpm.c ps->dclk_high_divider = ps 1539 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(new_rps); ps 1551 drivers/gpu/drm/radeon/trinity_dpm.c return trinity_patch_thermal_state(rdev, ps, current_ps); ps 1563 drivers/gpu/drm/radeon/trinity_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 1564 drivers/gpu/drm/radeon/trinity_dpm.c if (ps->levels[i].vddc_index < min_voltage) ps 1565 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].vddc_index = min_voltage; ps 1567 drivers/gpu/drm/radeon/trinity_dpm.c if (ps->levels[i].sclk < min_sclk) ps 1568 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].sclk = ps 1574 drivers/gpu/drm/radeon/trinity_dpm.c if (ps->levels[i].sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk) ps 1575 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk; ps 1578 drivers/gpu/drm/radeon/trinity_dpm.c if (ps->levels[i].vddc_index < min_vce_voltage) ps 1579 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].vddc_index = min_vce_voltage; ps 1582 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].ds_divider_index = ps 1583 drivers/gpu/drm/radeon/trinity_dpm.c sumo_get_sleep_divider_id_from_clock(rdev, ps->levels[i].sclk, sclk_in_sr); ps 1585 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].ss_divider_index = ps->levels[i].ds_divider_index; ps 1587 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].allow_gnb_slow = 1; ps 1588 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].force_nbp_state = 0; ps 1589 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].display_wm = ps 1590 drivers/gpu/drm/radeon/trinity_dpm.c trinity_calculate_display_wm(rdev, ps, i); ps 1591 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[i].vce_wm = ps 1592 drivers/gpu/drm/radeon/trinity_dpm.c trinity_calculate_vce_wm(rdev, ps->levels[0].sclk); ps 1597 drivers/gpu/drm/radeon/trinity_dpm.c ps->bapm_flags |= TRINITY_POWERSTATE_FLAGS_BAPM_DISABLE; ps 1600 drivers/gpu/drm/radeon/trinity_dpm.c ps->Dpm0PgNbPsLo = 0x1; ps 1601 drivers/gpu/drm/radeon/trinity_dpm.c ps->Dpm0PgNbPsHi = 0x0; ps 1602 drivers/gpu/drm/radeon/trinity_dpm.c ps->DpmXNbPsLo = 0x2; ps 1603 drivers/gpu/drm/radeon/trinity_dpm.c ps->DpmXNbPsHi = 0x1; ps 1611 drivers/gpu/drm/radeon/trinity_dpm.c ps->Dpm0PgNbPsLo = force_high ? 0x2 : 0x3; ps 1612 drivers/gpu/drm/radeon/trinity_dpm.c ps->Dpm0PgNbPsHi = 0x1; ps 1613 drivers/gpu/drm/radeon/trinity_dpm.c ps->DpmXNbPsLo = force_high ? 
0x2 : 0x3; ps 1614 drivers/gpu/drm/radeon/trinity_dpm.c ps->DpmXNbPsHi = 0x2; ps 1615 drivers/gpu/drm/radeon/trinity_dpm.c ps->levels[ps->num_levels - 1].allow_gnb_slow = 0; ps 1686 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 1702 drivers/gpu/drm/radeon/trinity_dpm.c trinity_patch_boot_state(rdev, ps); ps 1713 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 1714 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_pl *pl = &ps->levels[index]; ps 1722 drivers/gpu/drm/radeon/trinity_dpm.c ps->num_levels = index + 1; ps 1745 drivers/gpu/drm/radeon/trinity_dpm.c struct sumo_ps *ps; ps 1762 drivers/gpu/drm/radeon/trinity_dpm.c rdev->pm.dpm.ps = kcalloc(state_array->ucNumEntries, ps 1765 drivers/gpu/drm/radeon/trinity_dpm.c if (!rdev->pm.dpm.ps) ps 1776 drivers/gpu/drm/radeon/trinity_dpm.c ps = kzalloc(sizeof(struct sumo_ps), GFP_KERNEL); ps 1777 drivers/gpu/drm/radeon/trinity_dpm.c if (ps == NULL) { ps 1778 drivers/gpu/drm/radeon/trinity_dpm.c kfree(rdev->pm.dpm.ps); ps 1781 drivers/gpu/drm/radeon/trinity_dpm.c rdev->pm.dpm.ps[i].ps_priv = ps; ps 1794 drivers/gpu/drm/radeon/trinity_dpm.c &rdev->pm.dpm.ps[i], k, ps 1798 drivers/gpu/drm/radeon/trinity_dpm.c trinity_parse_pplib_non_clock_info(rdev, &rdev->pm.dpm.ps[i], ps 2016 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 2021 drivers/gpu/drm/radeon/trinity_dpm.c for (i = 0; i < ps->num_levels; i++) { ps 2022 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_pl *pl = &ps->levels[i]; ps 2035 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 2041 drivers/gpu/drm/radeon/trinity_dpm.c if (current_index >= ps->num_levels) { ps 2044 drivers/gpu/drm/radeon/trinity_dpm.c pl = &ps->levels[current_index]; ps 2056 drivers/gpu/drm/radeon/trinity_dpm.c struct trinity_ps *ps = trinity_get_ps(rps); ps 2062 drivers/gpu/drm/radeon/trinity_dpm.c if (current_index >= ps->num_levels) { ps 2065 drivers/gpu/drm/radeon/trinity_dpm.c pl = &ps->levels[current_index]; ps 2084 drivers/gpu/drm/radeon/trinity_dpm.c kfree(rdev->pm.dpm.ps[i].ps_priv); ps 2086 drivers/gpu/drm/radeon/trinity_dpm.c kfree(rdev->pm.dpm.ps); ps 218 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c struct drm_plane_state *ps; ps 224 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c ps = crtc->primary->state; ps 225 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c fb = ps->fb; ps 226 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c vps = vmw_plane_state_to_vps(ps); ps 209 drivers/hid/hid-asus.c struct input_mt_slot *ps = &mt->slots[i]; ps 210 drivers/hid/hid-asus.c int id = input_mt_get_value(ps, ABS_MT_TRACKING_ID); ps 215 drivers/hid/hid-asus.c oldest = ps; ps 139 drivers/hid/hid-logitech-hidpp.c struct power_supply *ps; ps 730 drivers/hid/hid-logitech-hidpp.c if (hidpp->battery.ps) ps 731 drivers/hid/hid-logitech-hidpp.c power_supply_changed(hidpp->battery.ps); ps 1218 drivers/hid/hid-logitech-hidpp.c if (hidpp->battery.ps) ps 1219 drivers/hid/hid-logitech-hidpp.c power_supply_changed(hidpp->battery.ps); ps 1460 drivers/hid/hid-logitech-hidpp.c if (hidpp->battery.ps) ps 1461 drivers/hid/hid-logitech-hidpp.c power_supply_changed(hidpp->battery.ps); ps 3225 drivers/hid/hid-logitech-hidpp.c if (hidpp->battery.ps) ps 3281 drivers/hid/hid-logitech-hidpp.c battery->ps = devm_power_supply_register(&hidpp->hid_dev->dev, ps 3284 drivers/hid/hid-logitech-hidpp.c if (IS_ERR(battery->ps)) ps 3285 drivers/hid/hid-logitech-hidpp.c return PTR_ERR(battery->ps); ps 3287 drivers/hid/hid-logitech-hidpp.c 
power_supply_powers(battery->ps, &hidpp->hid_dev->dev); ps 3359 drivers/hid/hid-logitech-hidpp.c if (hidpp->battery.ps) { ps 3363 drivers/hid/hid-logitech-hidpp.c power_supply_changed(hidpp->battery.ps); ps 3435 drivers/hid/hid-logitech-hidpp.c if (hidpp->battery.ps) ps 3436 drivers/hid/hid-logitech-hidpp.c power_supply_changed(hidpp->battery.ps); ps 1243 drivers/hid/wacom_wac.c struct input_mt_slot *ps = &input->mt->slots[i]; ps 1244 drivers/hid/wacom_wac.c int id = input_mt_get_value(ps, ABS_MT_TRACKING_ID); ps 206 drivers/infiniband/core/cma.c struct xarray *cma_pernet_xa(struct net *net, enum rdma_ucm_port_space ps) ps 210 drivers/infiniband/core/cma.c switch (ps) { ps 235 drivers/infiniband/core/cma.c enum rdma_ucm_port_space ps; ps 248 drivers/infiniband/core/cma.c static int cma_ps_alloc(struct net *net, enum rdma_ucm_port_space ps, ps 251 drivers/infiniband/core/cma.c struct xarray *xa = cma_pernet_xa(net, ps); ps 257 drivers/infiniband/core/cma.c enum rdma_ucm_port_space ps, int snum) ps 259 drivers/infiniband/core/cma.c struct xarray *xa = cma_pernet_xa(net, ps); ps 264 drivers/infiniband/core/cma.c static void cma_ps_remove(struct net *net, enum rdma_ucm_port_space ps, ps 267 drivers/infiniband/core/cma.c struct xarray *xa = cma_pernet_xa(net, ps); ps 567 drivers/infiniband/core/cma.c switch (id_priv->id.ps) { ps 669 drivers/infiniband/core/cma.c id_priv->id.ps == RDMA_PS_IPOIB) ps 721 drivers/infiniband/core/cma.c id_priv->id.ps == RDMA_PS_IPOIB) ps 761 drivers/infiniband/core/cma.c id_priv->id.ps == RDMA_PS_IPOIB) ps 877 drivers/infiniband/core/cma.c void *context, enum rdma_ucm_port_space ps, ps 891 drivers/infiniband/core/cma.c id_priv->id.ps = ps; ps 1796 drivers/infiniband/core/cma.c cma_ps_remove(net, bind_list->ps, bind_list->port); ps 2020 drivers/infiniband/core/cma.c listen_id->ps, ib_event->param.req_rcvd.qp_type, ps 2080 drivers/infiniband/core/cma.c listen_id->ps, IB_QPT_UD, ps 2234 drivers/infiniband/core/cma.c return cpu_to_be64(((u64)id->ps << 16) + be16_to_cpu(cma_port(addr))); ps 2479 drivers/infiniband/core/cma.c id = __rdma_create_id(net, cma_listen_handler, id_priv, id_priv->id.ps, ps 3270 drivers/infiniband/core/cma.c static int cma_alloc_port(enum rdma_ucm_port_space ps, ps 3280 drivers/infiniband/core/cma.c ret = cma_ps_alloc(id_priv->id.route.addr.dev_addr.net, ps, bind_list, ps 3285 drivers/infiniband/core/cma.c bind_list->ps = ps; ps 3333 drivers/infiniband/core/cma.c static int cma_alloc_any_port(enum rdma_ucm_port_space ps, ps 3349 drivers/infiniband/core/cma.c bind_list = cma_ps_find(net, ps, (unsigned short)rover); ps 3352 drivers/infiniband/core/cma.c ret = cma_alloc_port(ps, id_priv, rover); ps 3411 drivers/infiniband/core/cma.c static int cma_use_port(enum rdma_ucm_port_space ps, ps 3422 drivers/infiniband/core/cma.c bind_list = cma_ps_find(id_priv->id.route.addr.dev_addr.net, ps, snum); ps 3424 drivers/infiniband/core/cma.c ret = cma_alloc_port(ps, id_priv, snum); ps 3448 drivers/infiniband/core/cma.c switch (id_priv->id.ps) { ps 3453 drivers/infiniband/core/cma.c return id_priv->id.ps; ps 3463 drivers/infiniband/core/cma.c enum rdma_ucm_port_space ps = 0; ps 3471 drivers/infiniband/core/cma.c if ((id_priv->id.ps == RDMA_PS_IB) && (sid == (RDMA_IB_IP_PS_IB & mask))) { ps 3473 drivers/infiniband/core/cma.c ps = RDMA_PS_IB; ps 3474 drivers/infiniband/core/cma.c } else if (((id_priv->id.ps == RDMA_PS_IB) || (id_priv->id.ps == RDMA_PS_TCP)) && ps 3477 drivers/infiniband/core/cma.c ps = RDMA_PS_TCP; ps 3478 drivers/infiniband/core/cma.c } else if 
(((id_priv->id.ps == RDMA_PS_IB) || (id_priv->id.ps == RDMA_PS_UDP)) && ps 3481 drivers/infiniband/core/cma.c ps = RDMA_PS_UDP; ps 3484 drivers/infiniband/core/cma.c if (ps) { ps 3489 drivers/infiniband/core/cma.c return ps; ps 3494 drivers/infiniband/core/cma.c enum rdma_ucm_port_space ps; ps 3498 drivers/infiniband/core/cma.c ps = cma_select_inet_ps(id_priv); ps 3500 drivers/infiniband/core/cma.c ps = cma_select_ib_ps(id_priv); ps 3501 drivers/infiniband/core/cma.c if (!ps) ps 3506 drivers/infiniband/core/cma.c ret = cma_alloc_any_port(ps, id_priv); ps 3508 drivers/infiniband/core/cma.c ret = cma_use_port(ps, id_priv); ps 4238 drivers/infiniband/core/cma.c if (id_priv->id.ps == RDMA_PS_UDP) ps 4243 drivers/infiniband/core/cma.c if (id_priv->id.ps == RDMA_PS_UDP) ps 4290 drivers/infiniband/core/cma.c if (id_priv->id.ps == RDMA_PS_IPOIB) ps 4376 drivers/infiniband/core/cma.c if (id_priv->id.ps == RDMA_PS_UDP) ps 509 drivers/infiniband/core/nldev.c if (nla_put_u32(msg, RDMA_NLDEV_ATTR_RES_PS, cm_id->ps)) ps 460 drivers/infiniband/core/ucma.c switch (cmd->ps) { ps 504 drivers/infiniband/core/ucma.c ucma_event_handler, ctx, cmd.ps, qp_type, NULL); ps 979 drivers/infiniband/hw/hfi1/hfi.h struct hfi1_pkt_state *ps, ps 1503 drivers/infiniband/hw/hfi1/hfi.h struct hfi1_pkt_state *ps, ps 1507 drivers/infiniband/hw/hfi1/hfi.h struct hfi1_pkt_state *ps, ps 73 drivers/infiniband/hw/hfi1/opa_compat.h static inline u8 port_states_to_logical_state(struct opa_port_states *ps) ps 75 drivers/infiniband/hw/hfi1/opa_compat.h return ps->portphysstate_portstate & OPA_PI_MASK_PORT_STATE; ps 78 drivers/infiniband/hw/hfi1/opa_compat.h static inline u8 port_states_to_phys_state(struct opa_port_states *ps) ps 80 drivers/infiniband/hw/hfi1/opa_compat.h return ((ps->portphysstate_portstate & ps 111 drivers/infiniband/hw/hfi1/rc.c struct hfi1_pkt_state *ps) ps 197 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->mr = e->rdma_sge.mr; ps 198 drivers/infiniband/hw/hfi1/rc.c if (ps->s_txreq->mr) ps 199 drivers/infiniband/hw/hfi1/rc.c rvt_get_mr(ps->s_txreq->mr); ps 202 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->ss = &qp->s_ack_rdma_sge; ps 246 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->mr = e->rdma_sge.mr; ps 247 drivers/infiniband/hw/hfi1/rc.c if (ps->s_txreq->mr) ps 248 drivers/infiniband/hw/hfi1/rc.c rvt_get_mr(ps->s_txreq->mr); ps 255 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->ss = NULL; ps 272 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->ss = &qp->s_ack_rdma_sge; ps 273 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->mr = qp->s_ack_rdma_sge.sge.mr; ps 274 drivers/infiniband/hw/hfi1/rc.c if (ps->s_txreq->mr) ps 275 drivers/infiniband/hw/hfi1/rc.c rvt_get_mr(ps->s_txreq->mr); ps 330 drivers/infiniband/hw/hfi1/rc.c &ps->s_txreq->ss); ps 350 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->ss = &qp->s_ack_rdma_sge; ps 391 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->txreq.flags |= SDMA_TXREQ_F_VIP; ps 392 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->ss = NULL; ps 395 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->sde = qpriv->s_sde; ps 396 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->s_cur_size = len; ps 397 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->hdr_dwords = hwords; ps 398 drivers/infiniband/hw/hfi1/rc.c hfi1_make_ruc_header(qp, ohdr, bth0, bth1, bth2, middle, ps); ps 401 drivers/infiniband/hw/hfi1/rc.c spin_unlock_irqrestore(&qp->s_lock, ps->flags); ps 402 drivers/infiniband/hw/hfi1/rc.c spin_lock_irqsave(&qp->r_lock, ps->flags); ps 406 drivers/infiniband/hw/hfi1/rc.c spin_unlock_irqrestore(&qp->r_lock, ps->flags); ps 
407 drivers/infiniband/hw/hfi1/rc.c spin_lock_irqsave(&qp->s_lock, ps->flags); ps 429 drivers/infiniband/hw/hfi1/rc.c int hfi1_make_rc_req(struct rvt_qp *qp, struct hfi1_pkt_state *ps) ps 452 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq = get_txreq(ps->dev, qp); ps 453 drivers/infiniband/hw/hfi1/rc.c if (!ps->s_txreq) ps 460 drivers/infiniband/hw/hfi1/rc.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.l.oth; ps 462 drivers/infiniband/hw/hfi1/rc.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.oth; ps 468 drivers/infiniband/hw/hfi1/rc.c ohdr = &ps->s_txreq->phdr.hdr.opah.u.l.oth; ps 470 drivers/infiniband/hw/hfi1/rc.c ohdr = &ps->s_txreq->phdr.hdr.opah.u.oth; ps 475 drivers/infiniband/hw/hfi1/rc.c make_rc_ack(dev, qp, ohdr, ps)) ps 1188 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->hdr_dwords = hwords; ps 1189 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->sde = priv->s_sde; ps 1190 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->ss = ss; ps 1191 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq->s_cur_size = len; ps 1199 drivers/infiniband/hw/hfi1/rc.c ps); ps 1203 drivers/infiniband/hw/hfi1/rc.c hfi1_put_txreq(ps->s_txreq); ps 1204 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq = NULL; ps 1208 drivers/infiniband/hw/hfi1/rc.c hfi1_put_txreq(ps->s_txreq); ps 1211 drivers/infiniband/hw/hfi1/rc.c ps->s_txreq = NULL; ps 276 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_pkt_state *ps) ps 279 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_ibport *ibp = ps->ibp; ps 285 drivers/infiniband/hw/hfi1/ruc.c (ps->s_txreq->hdr_dwords << 2), ps 286 drivers/infiniband/hw/hfi1/ruc.c ps->s_txreq->s_cur_size); ps 287 drivers/infiniband/hw/hfi1/ruc.c u32 nwords = SIZE_OF_CRC + ((ps->s_txreq->s_cur_size + ps 302 drivers/infiniband/hw/hfi1/ruc.c grh = &ps->s_txreq->phdr.hdr.opah.u.l.grh; ps 304 drivers/infiniband/hw/hfi1/ruc.c ps->s_txreq->hdr_dwords += ps 306 drivers/infiniband/hw/hfi1/ruc.c ps->s_txreq->hdr_dwords - LRH_16B_DWORDS, ps 338 drivers/infiniband/hw/hfi1/ruc.c hfi1_make_16b_hdr(&ps->s_txreq->phdr.hdr.opah, ps 342 drivers/infiniband/hw/hfi1/ruc.c (ps->s_txreq->hdr_dwords + nwords) >> 1, ps 364 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_pkt_state *ps) ps 367 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_ibport *ibp = ps->ibp; ps 370 drivers/infiniband/hw/hfi1/ruc.c u8 extra_bytes = -ps->s_txreq->s_cur_size & 3; ps 371 drivers/infiniband/hw/hfi1/ruc.c u32 nwords = SIZE_OF_CRC + ((ps->s_txreq->s_cur_size + ps 375 drivers/infiniband/hw/hfi1/ruc.c struct ib_grh *grh = &ps->s_txreq->phdr.hdr.ibh.u.l.grh; ps 378 drivers/infiniband/hw/hfi1/ruc.c ps->s_txreq->hdr_dwords += ps 381 drivers/infiniband/hw/hfi1/ruc.c ps->s_txreq->hdr_dwords - LRH_9B_DWORDS, ps 407 drivers/infiniband/hw/hfi1/ruc.c hfi1_make_ib_hdr(&ps->s_txreq->phdr.hdr.ibh, ps 409 drivers/infiniband/hw/hfi1/ruc.c ps->s_txreq->hdr_dwords + nwords, ps 418 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_pkt_state *ps); ps 428 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_pkt_state *ps) ps 449 drivers/infiniband/hw/hfi1/ruc.c ps); ps 470 drivers/infiniband/hw/hfi1/ruc.c bool hfi1_schedule_send_yield(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 473 drivers/infiniband/hw/hfi1/ruc.c ps->pkts_sent = true; ps 475 drivers/infiniband/hw/hfi1/ruc.c if (unlikely(time_after(jiffies, ps->timeout))) { ps 476 drivers/infiniband/hw/hfi1/ruc.c if (!ps->in_thread || ps 477 drivers/infiniband/hw/hfi1/ruc.c workqueue_congested(ps->cpu, ps->ppd->hfi1_wq)) { ps 478 drivers/infiniband/hw/hfi1/ruc.c spin_lock_irqsave(&qp->s_lock, ps->flags); ps 497 drivers/infiniband/hw/hfi1/ruc.c 
spin_unlock_irqrestore(&qp->s_lock, ps->flags); ps 498 drivers/infiniband/hw/hfi1/ruc.c this_cpu_inc(*ps->ppd->dd->send_schedule); ps 504 drivers/infiniband/hw/hfi1/ruc.c this_cpu_inc(*ps->ppd->dd->send_schedule); ps 505 drivers/infiniband/hw/hfi1/ruc.c ps->timeout = jiffies + ps->timeout_int; ps 536 drivers/infiniband/hw/hfi1/ruc.c struct hfi1_pkt_state ps; ps 538 drivers/infiniband/hw/hfi1/ruc.c int (*make_req)(struct rvt_qp *qp, struct hfi1_pkt_state *ps); ps 540 drivers/infiniband/hw/hfi1/ruc.c ps.dev = to_idev(qp->ibqp.device); ps 541 drivers/infiniband/hw/hfi1/ruc.c ps.ibp = to_iport(qp->ibqp.device, qp->port_num); ps 542 drivers/infiniband/hw/hfi1/ruc.c ps.ppd = ppd_from_ibp(ps.ibp); ps 543 drivers/infiniband/hw/hfi1/ruc.c ps.in_thread = in_thread; ps 544 drivers/infiniband/hw/hfi1/ruc.c ps.wait = iowait_get_ib_work(&priv->s_iowait); ps 551 drivers/infiniband/hw/hfi1/ruc.c ~((1 << ps.ppd->lmc) - 1)) == ps 552 drivers/infiniband/hw/hfi1/ruc.c ps.ppd->lid)) { ps 557 drivers/infiniband/hw/hfi1/ruc.c ps.timeout_int = qp->timeout_jiffies; ps 561 drivers/infiniband/hw/hfi1/ruc.c ~((1 << ps.ppd->lmc) - 1)) == ps 562 drivers/infiniband/hw/hfi1/ruc.c ps.ppd->lid)) { ps 567 drivers/infiniband/hw/hfi1/ruc.c ps.timeout_int = SEND_RESCHED_TIMEOUT; ps 571 drivers/infiniband/hw/hfi1/ruc.c ps.timeout_int = SEND_RESCHED_TIMEOUT; ps 574 drivers/infiniband/hw/hfi1/ruc.c spin_lock_irqsave(&qp->s_lock, ps.flags); ps 580 drivers/infiniband/hw/hfi1/ruc.c spin_unlock_irqrestore(&qp->s_lock, ps.flags); ps 586 drivers/infiniband/hw/hfi1/ruc.c ps.timeout_int = ps.timeout_int / 8; ps 587 drivers/infiniband/hw/hfi1/ruc.c ps.timeout = jiffies + ps.timeout_int; ps 588 drivers/infiniband/hw/hfi1/ruc.c ps.cpu = priv->s_sde ? priv->s_sde->cpu : ps 589 drivers/infiniband/hw/hfi1/ruc.c cpumask_first(cpumask_of_node(ps.ppd->dd->node)); ps 590 drivers/infiniband/hw/hfi1/ruc.c ps.pkts_sent = false; ps 593 drivers/infiniband/hw/hfi1/ruc.c ps.s_txreq = get_waiting_verbs_txreq(ps.wait); ps 596 drivers/infiniband/hw/hfi1/ruc.c if (ps.s_txreq) { ps 599 drivers/infiniband/hw/hfi1/ruc.c spin_unlock_irqrestore(&qp->s_lock, ps.flags); ps 604 drivers/infiniband/hw/hfi1/ruc.c if (hfi1_verbs_send(qp, &ps)) ps 608 drivers/infiniband/hw/hfi1/ruc.c if (hfi1_schedule_send_yield(qp, &ps, false)) ps 611 drivers/infiniband/hw/hfi1/ruc.c spin_lock_irqsave(&qp->s_lock, ps.flags); ps 613 drivers/infiniband/hw/hfi1/ruc.c } while (make_req(qp, &ps)); ps 614 drivers/infiniband/hw/hfi1/ruc.c iowait_starve_clear(ps.pkts_sent, &priv->s_iowait); ps 615 drivers/infiniband/hw/hfi1/ruc.c spin_unlock_irqrestore(&qp->s_lock, ps.flags); ps 125 drivers/infiniband/hw/hfi1/tid_rdma.c struct hfi1_pkt_state *ps); ps 4995 drivers/infiniband/hw/hfi1/tid_rdma.c int hfi1_make_tid_rdma_pkt(struct rvt_qp *qp, struct hfi1_pkt_state *ps) ps 5023 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq = get_waiting_verbs_txreq(iowork); ps 5024 drivers/infiniband/hw/hfi1/tid_rdma.c if (ps->s_txreq || hfi1_make_rc_req(qp, ps)) { ps 5030 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq = get_txreq(ps->dev, qp); ps 5031 drivers/infiniband/hw/hfi1/tid_rdma.c if (!ps->s_txreq) ps 5034 drivers/infiniband/hw/hfi1/tid_rdma.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.oth; ps 5037 drivers/infiniband/hw/hfi1/tid_rdma.c make_tid_rdma_ack(qp, ohdr, ps)) ps 5150 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->hdr_dwords = hwords; ps 5151 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->sde = priv->s_sde; ps 5152 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->ss = ss; ps 5153 
drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->s_cur_size = len; ps 5155 drivers/infiniband/hw/hfi1/tid_rdma.c middle, ps); ps 5158 drivers/infiniband/hw/hfi1/tid_rdma.c hfi1_put_txreq(ps->s_txreq); ps 5160 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq = NULL; ps 5175 drivers/infiniband/hw/hfi1/tid_rdma.c struct hfi1_pkt_state *ps) ps 5299 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->hdr_dwords = hwords; ps 5300 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->sde = qpriv->s_sde; ps 5301 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->s_cur_size = len; ps 5302 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->ss = NULL; ps 5304 drivers/infiniband/hw/hfi1/tid_rdma.c ps); ps 5305 drivers/infiniband/hw/hfi1/tid_rdma.c ps->s_txreq->txreq.flags |= SDMA_TXREQ_F_VIP; ps 5338 drivers/infiniband/hw/hfi1/tid_rdma.c struct hfi1_pkt_state ps; ps 5341 drivers/infiniband/hw/hfi1/tid_rdma.c ps.dev = to_idev(qp->ibqp.device); ps 5342 drivers/infiniband/hw/hfi1/tid_rdma.c ps.ibp = to_iport(qp->ibqp.device, qp->port_num); ps 5343 drivers/infiniband/hw/hfi1/tid_rdma.c ps.ppd = ppd_from_ibp(ps.ibp); ps 5344 drivers/infiniband/hw/hfi1/tid_rdma.c ps.wait = iowait_get_tid_work(&priv->s_iowait); ps 5345 drivers/infiniband/hw/hfi1/tid_rdma.c ps.in_thread = false; ps 5346 drivers/infiniband/hw/hfi1/tid_rdma.c ps.timeout_int = qp->timeout_jiffies / 8; ps 5349 drivers/infiniband/hw/hfi1/tid_rdma.c spin_lock_irqsave(&qp->s_lock, ps.flags); ps 5355 drivers/infiniband/hw/hfi1/tid_rdma.c spin_unlock_irqrestore(&qp->s_lock, ps.flags); ps 5361 drivers/infiniband/hw/hfi1/tid_rdma.c ps.timeout = jiffies + ps.timeout_int; ps 5362 drivers/infiniband/hw/hfi1/tid_rdma.c ps.cpu = priv->s_sde ? priv->s_sde->cpu : ps 5363 drivers/infiniband/hw/hfi1/tid_rdma.c cpumask_first(cpumask_of_node(ps.ppd->dd->node)); ps 5364 drivers/infiniband/hw/hfi1/tid_rdma.c ps.pkts_sent = false; ps 5367 drivers/infiniband/hw/hfi1/tid_rdma.c ps.s_txreq = get_waiting_verbs_txreq(ps.wait); ps 5370 drivers/infiniband/hw/hfi1/tid_rdma.c if (ps.s_txreq) { ps 5373 drivers/infiniband/hw/hfi1/tid_rdma.c ps.wait = iowait_get_ib_work(&priv->s_iowait); ps 5375 drivers/infiniband/hw/hfi1/tid_rdma.c spin_unlock_irqrestore(&qp->s_lock, ps.flags); ps 5381 drivers/infiniband/hw/hfi1/tid_rdma.c if (hfi1_verbs_send(qp, &ps)) ps 5385 drivers/infiniband/hw/hfi1/tid_rdma.c if (hfi1_schedule_send_yield(qp, &ps, true)) ps 5388 drivers/infiniband/hw/hfi1/tid_rdma.c spin_lock_irqsave(&qp->s_lock, ps.flags); ps 5392 drivers/infiniband/hw/hfi1/tid_rdma.c ps.wait = iowait_get_tid_work(&priv->s_iowait); ps 5398 drivers/infiniband/hw/hfi1/tid_rdma.c } while (hfi1_make_tid_rdma_pkt(qp, &ps)); ps 5399 drivers/infiniband/hw/hfi1/tid_rdma.c iowait_starve_clear(ps.pkts_sent, &priv->s_iowait); ps 5400 drivers/infiniband/hw/hfi1/tid_rdma.c spin_unlock_irqrestore(&qp->s_lock, ps.flags); ps 311 drivers/infiniband/hw/hfi1/tid_rdma.h int hfi1_make_tid_rdma_pkt(struct rvt_qp *qp, struct hfi1_pkt_state *ps); ps 63 drivers/infiniband/hw/hfi1/uc.c int hfi1_make_uc_req(struct rvt_qp *qp, struct hfi1_pkt_state *ps) ps 74 drivers/infiniband/hw/hfi1/uc.c ps->s_txreq = get_txreq(ps->dev, qp); ps 75 drivers/infiniband/hw/hfi1/uc.c if (!ps->s_txreq) ps 99 drivers/infiniband/hw/hfi1/uc.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.l.oth; ps 101 drivers/infiniband/hw/hfi1/uc.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.oth; ps 107 drivers/infiniband/hw/hfi1/uc.c ohdr = &ps->s_txreq->phdr.hdr.opah.u.l.oth; ps 109 drivers/infiniband/hw/hfi1/uc.c ohdr = &ps->s_txreq->phdr.hdr.opah.u.oth; ps 269 
drivers/infiniband/hw/hfi1/uc.c ps->s_txreq->hdr_dwords = hwords; ps 270 drivers/infiniband/hw/hfi1/uc.c ps->s_txreq->sde = priv->s_sde; ps 271 drivers/infiniband/hw/hfi1/uc.c ps->s_txreq->ss = &qp->s_sge; ps 272 drivers/infiniband/hw/hfi1/uc.c ps->s_txreq->s_cur_size = len; ps 275 drivers/infiniband/hw/hfi1/uc.c middle, ps); ps 279 drivers/infiniband/hw/hfi1/uc.c hfi1_put_txreq(ps->s_txreq); ps 280 drivers/infiniband/hw/hfi1/uc.c ps->s_txreq = NULL; ps 284 drivers/infiniband/hw/hfi1/uc.c hfi1_put_txreq(ps->s_txreq); ps 287 drivers/infiniband/hw/hfi1/uc.c ps->s_txreq = NULL; ps 303 drivers/infiniband/hw/hfi1/ud.c void hfi1_make_ud_req_9B(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 324 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords = 7; ps 326 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords++; ps 329 drivers/infiniband/hw/hfi1/ud.c grh = &ps->s_txreq->phdr.hdr.ibh.u.l.grh; ps 330 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords += ps 332 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords - LRH_9B_DWORDS, ps 335 drivers/infiniband/hw/hfi1/ud.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.l.oth; ps 338 drivers/infiniband/hw/hfi1/ud.c ohdr = &ps->s_txreq->phdr.hdr.ibh.u.oth; ps 366 drivers/infiniband/hw/hfi1/ud.c len = ps->s_txreq->hdr_dwords + nwords; ps 369 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->phdr.hdr.hdr_type = HFI1_PKT_TYPE_9B; ps 370 drivers/infiniband/hw/hfi1/ud.c hfi1_make_ib_hdr(&ps->s_txreq->phdr.hdr.ibh, ps 374 drivers/infiniband/hw/hfi1/ud.c void hfi1_make_ud_req_16B(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 399 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords = 6; ps 403 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords = 9; ps 405 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords++; ps 409 drivers/infiniband/hw/hfi1/ud.c extra_bytes = hfi1_get_16b_padding((ps->s_txreq->hdr_dwords << 2), ps 426 drivers/infiniband/hw/hfi1/ud.c grh = &ps->s_txreq->phdr.hdr.opah.u.l.grh; ps 427 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords += hfi1_make_grh( ps 429 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->hdr_dwords - LRH_16B_DWORDS, ps 431 drivers/infiniband/hw/hfi1/ud.c ohdr = &ps->s_txreq->phdr.hdr.opah.u.l.oth; ps 434 drivers/infiniband/hw/hfi1/ud.c ohdr = &ps->s_txreq->phdr.hdr.opah.u.oth; ps 454 drivers/infiniband/hw/hfi1/ud.c hfi1_16B_set_qpn(&ps->s_txreq->phdr.hdr.opah.u.mgmt, ps 460 drivers/infiniband/hw/hfi1/ud.c len = (ps->s_txreq->hdr_dwords + nwords) >> 1; ps 463 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->phdr.hdr.hdr_type = HFI1_PKT_TYPE_16B; ps 464 drivers/infiniband/hw/hfi1/ud.c hfi1_make_16b_hdr(&ps->s_txreq->phdr.hdr.opah, ps 476 drivers/infiniband/hw/hfi1/ud.c int hfi1_make_ud_req(struct rvt_qp *qp, struct hfi1_pkt_state *ps) ps 486 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq = get_txreq(ps->dev, qp); ps 487 drivers/infiniband/hw/hfi1/ud.c if (!ps->s_txreq) ps 527 drivers/infiniband/hw/hfi1/ud.c unsigned long tflags = ps->flags; ps 543 drivers/infiniband/hw/hfi1/ud.c ps->flags = tflags; ps 550 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->s_cur_size = wqe->length; ps 551 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->ss = &qp->s_sge; ps 561 drivers/infiniband/hw/hfi1/ud.c hfi1_make_ud_req_tbl[priv->hdr_type](qp, ps, qp->s_wqe); ps 563 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->sde = priv->s_sde; ps 565 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq->psc = priv->s_sendcontext; ps 574 drivers/infiniband/hw/hfi1/ud.c hfi1_put_txreq(ps->s_txreq); ps 575 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq = NULL; ps 579 
drivers/infiniband/hw/hfi1/ud.c hfi1_put_txreq(ps->s_txreq); ps 582 drivers/infiniband/hw/hfi1/ud.c ps->s_txreq = NULL; ps 144 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_pkt_state *ps, ps 670 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_pkt_state *ps) ps 678 drivers/infiniband/hw/hfi1/verbs.c list_add_tail(&ps->s_txreq->txreq.list, ps 679 drivers/infiniband/hw/hfi1/verbs.c &ps->wait->tx_head); ps 682 drivers/infiniband/hw/hfi1/verbs.c hfi1_qp_unbusy(qp, ps->wait); ps 738 drivers/infiniband/hw/hfi1/verbs.c static void update_tx_opstats(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 745 drivers/infiniband/hw/hfi1/verbs.c inc_opstats(plen * 4, &s->stats[ps->opcode]); ps 838 drivers/infiniband/hw/hfi1/verbs.c int hfi1_verbs_send_dma(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 843 drivers/infiniband/hw/hfi1/verbs.c u32 hdrwords = ps->s_txreq->hdr_dwords; ps 844 drivers/infiniband/hw/hfi1/verbs.c u32 len = ps->s_txreq->s_cur_size; ps 846 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_ibdev *dev = ps->dev; ps 847 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_pportdata *ppd = ps->ppd; ps 853 drivers/infiniband/hw/hfi1/verbs.c if (ps->s_txreq->phdr.hdr.hdr_type) { ps 863 drivers/infiniband/hw/hfi1/verbs.c tx = ps->s_txreq; ps 870 drivers/infiniband/hw/hfi1/verbs.c if (ps->s_txreq->phdr.hdr.hdr_type) ps 882 drivers/infiniband/hw/hfi1/verbs.c if (unlikely(hfi1_dbg_should_fault_tx(qp, ps->opcode))) ps 883 drivers/infiniband/hw/hfi1/verbs.c pbc = hfi1_fault_tx(qp, ps->opcode, pbc); ps 886 drivers/infiniband/hw/hfi1/verbs.c pbc = update_hcrc(ps->opcode, pbc); ps 893 drivers/infiniband/hw/hfi1/verbs.c ret = sdma_send_txreq(tx->sde, ps->wait, &tx->txreq, ps->pkts_sent); ps 900 drivers/infiniband/hw/hfi1/verbs.c update_tx_opstats(qp, ps, plen); ps 902 drivers/infiniband/hw/hfi1/verbs.c &ps->s_txreq->phdr.hdr, ib_is_sc5(sc5)); ps 909 drivers/infiniband/hw/hfi1/verbs.c ret = wait_kmem(dev, qp, ps); ps 912 drivers/infiniband/hw/hfi1/verbs.c hfi1_put_txreq(ps->s_txreq); ps 913 drivers/infiniband/hw/hfi1/verbs.c ps->s_txreq = NULL; ps 924 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_pkt_state *ps, ps 941 drivers/infiniband/hw/hfi1/verbs.c list_add_tail(&ps->s_txreq->txreq.list, ps 942 drivers/infiniband/hw/hfi1/verbs.c &ps->wait->tx_head); ps 952 drivers/infiniband/hw/hfi1/verbs.c iowait_queue(ps->pkts_sent, &priv->s_iowait, ps 962 drivers/infiniband/hw/hfi1/verbs.c hfi1_qp_unbusy(qp, ps->wait); ps 978 drivers/infiniband/hw/hfi1/verbs.c int hfi1_verbs_send_pio(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 982 drivers/infiniband/hw/hfi1/verbs.c u32 hdrwords = ps->s_txreq->hdr_dwords; ps 983 drivers/infiniband/hw/hfi1/verbs.c struct rvt_sge_state *ss = ps->s_txreq->ss; ps 984 drivers/infiniband/hw/hfi1/verbs.c u32 len = ps->s_txreq->s_cur_size; ps 987 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_pportdata *ppd = ps->ppd; ps 998 drivers/infiniband/hw/hfi1/verbs.c if (ps->s_txreq->phdr.hdr.hdr_type) { ps 1003 drivers/infiniband/hw/hfi1/verbs.c hdr = (u32 *)&ps->s_txreq->phdr.hdr.opah; ps 1006 drivers/infiniband/hw/hfi1/verbs.c hdr = (u32 *)&ps->s_txreq->phdr.hdr.ibh; ps 1022 drivers/infiniband/hw/hfi1/verbs.c sc = ps->s_txreq->psc; ps 1028 drivers/infiniband/hw/hfi1/verbs.c if (ps->s_txreq->phdr.hdr.hdr_type) ps 1034 drivers/infiniband/hw/hfi1/verbs.c if (unlikely(hfi1_dbg_should_fault_tx(qp, ps->opcode))) ps 1035 drivers/infiniband/hw/hfi1/verbs.c pbc = hfi1_fault_tx(qp, ps->opcode, pbc); ps 1038 drivers/infiniband/hw/hfi1/verbs.c pbc = update_hcrc(ps->opcode, pbc); ps 1065 
drivers/infiniband/hw/hfi1/verbs.c ret = pio_wait(qp, sc, ps, RVT_S_WAIT_PIO); ps 1097 drivers/infiniband/hw/hfi1/verbs.c update_tx_opstats(qp, ps, plen); ps 1099 drivers/infiniband/hw/hfi1/verbs.c &ps->s_txreq->phdr.hdr, ib_is_sc5(sc5)); ps 1107 drivers/infiniband/hw/hfi1/verbs.c hfi1_rc_verbs_aborted(qp, &ps->s_txreq->phdr.hdr); ps 1108 drivers/infiniband/hw/hfi1/verbs.c hfi1_rc_send_complete(qp, &ps->s_txreq->phdr.hdr); ps 1115 drivers/infiniband/hw/hfi1/verbs.c hfi1_put_txreq(ps->s_txreq); ps 1215 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_pkt_state *ps) ps 1219 drivers/infiniband/hw/hfi1/verbs.c struct verbs_txreq *tx = ps->s_txreq; ps 1235 drivers/infiniband/hw/hfi1/verbs.c (BIT(ps->opcode & OPMASK) & pio_opmask[ps->opcode >> 5]) && ps 1254 drivers/infiniband/hw/hfi1/verbs.c int hfi1_verbs_send(struct rvt_qp *qp, struct hfi1_pkt_state *ps) ps 1266 drivers/infiniband/hw/hfi1/verbs.c if (ps->s_txreq->phdr.hdr.hdr_type) { ps 1267 drivers/infiniband/hw/hfi1/verbs.c struct hfi1_16b_header *hdr = &ps->s_txreq->phdr.hdr.opah; ps 1278 drivers/infiniband/hw/hfi1/verbs.c struct ib_header *hdr = &ps->s_txreq->phdr.hdr.ibh; ps 1290 drivers/infiniband/hw/hfi1/verbs.c ps->opcode = ib_bth_get_opcode(ohdr); ps 1292 drivers/infiniband/hw/hfi1/verbs.c ps->opcode = IB_OPCODE_UD_SEND_ONLY; ps 1294 drivers/infiniband/hw/hfi1/verbs.c sr = get_send_routine(qp, ps); ps 1319 drivers/infiniband/hw/hfi1/verbs.c ps->s_txreq->psc, ps 1320 drivers/infiniband/hw/hfi1/verbs.c ps, ps 1322 drivers/infiniband/hw/hfi1/verbs.c return sr(qp, ps, 0); ps 404 drivers/infiniband/hw/hfi1/verbs.h int hfi1_verbs_send(struct rvt_qp *qp, struct hfi1_pkt_state *ps); ps 447 drivers/infiniband/hw/hfi1/verbs.h struct hfi1_pkt_state *ps); ps 449 drivers/infiniband/hw/hfi1/verbs.h bool hfi1_schedule_send_yield(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 460 drivers/infiniband/hw/hfi1/verbs.h int hfi1_make_rc_req(struct rvt_qp *qp, struct hfi1_pkt_state *ps); ps 462 drivers/infiniband/hw/hfi1/verbs.h int hfi1_make_uc_req(struct rvt_qp *qp, struct hfi1_pkt_state *ps); ps 464 drivers/infiniband/hw/hfi1/verbs.h int hfi1_make_ud_req(struct rvt_qp *qp, struct hfi1_pkt_state *ps); ps 480 drivers/infiniband/hw/hfi1/verbs.h int hfi1_verbs_send_dma(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 483 drivers/infiniband/hw/hfi1/verbs.h int hfi1_verbs_send_pio(struct rvt_qp *qp, struct hfi1_pkt_state *ps, ps 604 drivers/infiniband/sw/rdmavt/mr.c u32 ps = 1 << mr->mr.page_shift; ps 614 drivers/infiniband/sw/rdmavt/mr.c mr->mr.map[m]->segs[n].length = ps; ps 615 drivers/infiniband/sw/rdmavt/mr.c mr->mr.length += ps; ps 616 drivers/infiniband/sw/rdmavt/mr.c trace_rvt_mr_page_seg(&mr->mr, m, n, (void *)addr, ps); ps 792 drivers/infiniband/sw/rdmavt/mr.c u32 ps; ps 806 drivers/infiniband/sw/rdmavt/mr.c ps = 1 << fmr->mr.page_shift; ps 807 drivers/infiniband/sw/rdmavt/mr.c fmr->mr.length = list_len * ps; ps 812 drivers/infiniband/sw/rdmavt/mr.c fmr->mr.map[m]->segs[n].length = ps; ps 813 drivers/infiniband/sw/rdmavt/mr.c trace_rvt_mr_fmr_seg(&fmr->mr, m, n, (void *)page_list[i], ps); ps 209 drivers/input/input-mt.c struct input_mt_slot *ps = &mt->slots[i]; ps 210 drivers/input/input-mt.c int id = input_mt_get_value(ps, ABS_MT_TRACKING_ID); ps 215 drivers/input/input-mt.c oldest = ps; ps 56 drivers/md/dm-mpath.c struct path_selector ps; ps 171 drivers/md/dm-mpath.c struct path_selector *ps = &pg->ps; ps 173 drivers/md/dm-mpath.c if (ps->type) { ps 174 drivers/md/dm-mpath.c ps->type->destroy(ps); ps 175 drivers/md/dm-mpath.c 
dm_put_path_selector(ps->type); ps 351 drivers/md/dm-mpath.c path = pg->ps.type->select_path(&pg->ps, nr_bytes); ps 540 drivers/md/dm-mpath.c if (pgpath->pg->ps.type->start_io) ps 541 drivers/md/dm-mpath.c pgpath->pg->ps.type->start_io(&pgpath->pg->ps, ps 558 drivers/md/dm-mpath.c if (pgpath && pgpath->pg->ps.type->end_io) ps 559 drivers/md/dm-mpath.c pgpath->pg->ps.type->end_io(&pgpath->pg->ps, ps 625 drivers/md/dm-mpath.c if (pgpath->pg->ps.type->start_io) ps 626 drivers/md/dm-mpath.c pgpath->pg->ps.type->start_io(&pgpath->pg->ps, ps 767 drivers/md/dm-mpath.c r = pst->create(&pg->ps, ps_argc, as->argv); ps 774 drivers/md/dm-mpath.c pg->ps.type = pst; ps 836 drivers/md/dm-mpath.c static struct pgpath *parse_path(struct dm_arg_set *as, struct path_selector *ps, ps 874 drivers/md/dm-mpath.c r = ps->type->add_path(ps, &p->path, as->argc, as->argv, &ti->error); ps 941 drivers/md/dm-mpath.c pgpath = parse_path(&path_args, &pg->ps, ti); ps 1232 drivers/md/dm-mpath.c pgpath->pg->ps.type->fail_path(&pgpath->pg->ps, &pgpath->path); ps 1269 drivers/md/dm-mpath.c r = pgpath->pg->ps.type->reinstate_path(&pgpath->pg->ps, &pgpath->path); ps 1560 drivers/md/dm-mpath.c struct path_selector *ps = &pgpath->pg->ps; ps 1562 drivers/md/dm-mpath.c if (ps->type->end_io) ps 1563 drivers/md/dm-mpath.c ps->type->end_io(ps, &pgpath->path, mpio->nr_bytes); ps 1604 drivers/md/dm-mpath.c struct path_selector *ps = &pgpath->pg->ps; ps 1606 drivers/md/dm-mpath.c if (ps->type->end_io) ps 1607 drivers/md/dm-mpath.c ps->type->end_io(ps, &pgpath->path, mpio->nr_bytes); ps 1737 drivers/md/dm-mpath.c if (pg->ps.type->status) ps 1738 drivers/md/dm-mpath.c sz += pg->ps.type->status(&pg->ps, NULL, type, ps 1745 drivers/md/dm-mpath.c pg->ps.type->info_args); ps 1751 drivers/md/dm-mpath.c if (pg->ps.type->status) ps 1752 drivers/md/dm-mpath.c sz += pg->ps.type->status(&pg->ps, ps 1761 drivers/md/dm-mpath.c DMEMIT("%s ", pg->ps.type->name); ps 1763 drivers/md/dm-mpath.c if (pg->ps.type->status) ps 1764 drivers/md/dm-mpath.c sz += pg->ps.type->status(&pg->ps, NULL, type, ps 1771 drivers/md/dm-mpath.c pg->ps.type->table_args); ps 1775 drivers/md/dm-mpath.c if (pg->ps.type->status) ps 1776 drivers/md/dm-mpath.c sz += pg->ps.type->status(&pg->ps, ps 40 drivers/md/dm-path-selector.h int (*create) (struct path_selector *ps, unsigned argc, char **argv); ps 41 drivers/md/dm-path-selector.h void (*destroy) (struct path_selector *ps); ps 47 drivers/md/dm-path-selector.h int (*add_path) (struct path_selector *ps, struct dm_path *path, ps 54 drivers/md/dm-path-selector.h struct dm_path *(*select_path) (struct path_selector *ps, ps 60 drivers/md/dm-path-selector.h void (*fail_path) (struct path_selector *ps, struct dm_path *p); ps 65 drivers/md/dm-path-selector.h int (*reinstate_path) (struct path_selector *ps, struct dm_path *p); ps 71 drivers/md/dm-path-selector.h int (*status) (struct path_selector *ps, struct dm_path *path, ps 74 drivers/md/dm-path-selector.h int (*start_io) (struct path_selector *ps, struct dm_path *path, ps 76 drivers/md/dm-path-selector.h int (*end_io) (struct path_selector *ps, struct dm_path *path, ps 55 drivers/md/dm-queue-length.c static int ql_create(struct path_selector *ps, unsigned argc, char **argv) ps 62 drivers/md/dm-queue-length.c ps->context = s; ps 76 drivers/md/dm-queue-length.c static void ql_destroy(struct path_selector *ps) ps 78 drivers/md/dm-queue-length.c struct selector *s = ps->context; ps 83 drivers/md/dm-queue-length.c ps->context = NULL; ps 86 drivers/md/dm-queue-length.c static int 
ql_status(struct path_selector *ps, struct dm_path *path, ps 111 drivers/md/dm-queue-length.c static int ql_add_path(struct path_selector *ps, struct dm_path *path, ps 114 drivers/md/dm-queue-length.c struct selector *s = ps->context; ps 160 drivers/md/dm-queue-length.c static void ql_fail_path(struct path_selector *ps, struct dm_path *path) ps 162 drivers/md/dm-queue-length.c struct selector *s = ps->context; ps 171 drivers/md/dm-queue-length.c static int ql_reinstate_path(struct path_selector *ps, struct dm_path *path) ps 173 drivers/md/dm-queue-length.c struct selector *s = ps->context; ps 187 drivers/md/dm-queue-length.c static struct dm_path *ql_select_path(struct path_selector *ps, size_t nr_bytes) ps 189 drivers/md/dm-queue-length.c struct selector *s = ps->context; ps 219 drivers/md/dm-queue-length.c static int ql_start_io(struct path_selector *ps, struct dm_path *path, ps 229 drivers/md/dm-queue-length.c static int ql_end_io(struct path_selector *ps, struct dm_path *path, ps 65 drivers/md/dm-round-robin.c static int rr_create(struct path_selector *ps, unsigned argc, char **argv) ps 73 drivers/md/dm-round-robin.c ps->context = s; ps 77 drivers/md/dm-round-robin.c static void rr_destroy(struct path_selector *ps) ps 79 drivers/md/dm-round-robin.c struct selector *s = ps->context; ps 84 drivers/md/dm-round-robin.c ps->context = NULL; ps 87 drivers/md/dm-round-robin.c static int rr_status(struct path_selector *ps, struct dm_path *path, ps 113 drivers/md/dm-round-robin.c static int rr_add_path(struct path_selector *ps, struct dm_path *path, ps 116 drivers/md/dm-round-robin.c struct selector *s = ps->context; ps 157 drivers/md/dm-round-robin.c static void rr_fail_path(struct path_selector *ps, struct dm_path *p) ps 160 drivers/md/dm-round-robin.c struct selector *s = ps->context; ps 168 drivers/md/dm-round-robin.c static int rr_reinstate_path(struct path_selector *ps, struct dm_path *p) ps 171 drivers/md/dm-round-robin.c struct selector *s = ps->context; ps 181 drivers/md/dm-round-robin.c static struct dm_path *rr_select_path(struct path_selector *ps, size_t nr_bytes) ps 184 drivers/md/dm-round-robin.c struct selector *s = ps->context; ps 51 drivers/md/dm-service-time.c static int st_create(struct path_selector *ps, unsigned argc, char **argv) ps 58 drivers/md/dm-service-time.c ps->context = s; ps 72 drivers/md/dm-service-time.c static void st_destroy(struct path_selector *ps) ps 74 drivers/md/dm-service-time.c struct selector *s = ps->context; ps 79 drivers/md/dm-service-time.c ps->context = NULL; ps 82 drivers/md/dm-service-time.c static int st_status(struct path_selector *ps, struct dm_path *path, ps 108 drivers/md/dm-service-time.c static int st_add_path(struct path_selector *ps, struct dm_path *path, ps 111 drivers/md/dm-service-time.c struct selector *s = ps->context; ps 173 drivers/md/dm-service-time.c static void st_fail_path(struct path_selector *ps, struct dm_path *path) ps 175 drivers/md/dm-service-time.c struct selector *s = ps->context; ps 184 drivers/md/dm-service-time.c static int st_reinstate_path(struct path_selector *ps, struct dm_path *path) ps 186 drivers/md/dm-service-time.c struct selector *s = ps->context; ps 274 drivers/md/dm-service-time.c static struct dm_path *st_select_path(struct path_selector *ps, size_t nr_bytes) ps 276 drivers/md/dm-service-time.c struct selector *s = ps->context; ps 301 drivers/md/dm-service-time.c static int st_start_io(struct path_selector *ps, struct dm_path *path, ps 311 drivers/md/dm-service-time.c static int st_end_io(struct 
path_selector *ps, struct dm_path *path, ps 167 drivers/md/dm-snap-persistent.c static int alloc_area(struct pstore *ps) ps 172 drivers/md/dm-snap-persistent.c len = ps->store->chunk_size << SECTOR_SHIFT; ps 178 drivers/md/dm-snap-persistent.c ps->area = vmalloc(len); ps 179 drivers/md/dm-snap-persistent.c if (!ps->area) ps 182 drivers/md/dm-snap-persistent.c ps->zero_area = vzalloc(len); ps 183 drivers/md/dm-snap-persistent.c if (!ps->zero_area) ps 186 drivers/md/dm-snap-persistent.c ps->header_area = vmalloc(len); ps 187 drivers/md/dm-snap-persistent.c if (!ps->header_area) ps 193 drivers/md/dm-snap-persistent.c vfree(ps->zero_area); ps 196 drivers/md/dm-snap-persistent.c vfree(ps->area); ps 202 drivers/md/dm-snap-persistent.c static void free_area(struct pstore *ps) ps 204 drivers/md/dm-snap-persistent.c vfree(ps->area); ps 205 drivers/md/dm-snap-persistent.c ps->area = NULL; ps 206 drivers/md/dm-snap-persistent.c vfree(ps->zero_area); ps 207 drivers/md/dm-snap-persistent.c ps->zero_area = NULL; ps 208 drivers/md/dm-snap-persistent.c vfree(ps->header_area); ps 209 drivers/md/dm-snap-persistent.c ps->header_area = NULL; ps 229 drivers/md/dm-snap-persistent.c static int chunk_io(struct pstore *ps, void *area, chunk_t chunk, int op, ps 233 drivers/md/dm-snap-persistent.c .bdev = dm_snap_cow(ps->store->snap)->bdev, ps 234 drivers/md/dm-snap-persistent.c .sector = ps->store->chunk_size * chunk, ps 235 drivers/md/dm-snap-persistent.c .count = ps->store->chunk_size, ps 242 drivers/md/dm-snap-persistent.c .client = ps->io_client, ps 258 drivers/md/dm-snap-persistent.c queue_work(ps->metadata_wq, &req.work); ps 259 drivers/md/dm-snap-persistent.c flush_workqueue(ps->metadata_wq); ps 268 drivers/md/dm-snap-persistent.c static chunk_t area_location(struct pstore *ps, chunk_t area) ps 270 drivers/md/dm-snap-persistent.c return NUM_SNAPSHOT_HDR_CHUNKS + ((ps->exceptions_per_area + 1) * area); ps 273 drivers/md/dm-snap-persistent.c static void skip_metadata(struct pstore *ps) ps 275 drivers/md/dm-snap-persistent.c uint32_t stride = ps->exceptions_per_area + 1; ps 276 drivers/md/dm-snap-persistent.c chunk_t next_free = ps->next_free; ps 278 drivers/md/dm-snap-persistent.c ps->next_free++; ps 285 drivers/md/dm-snap-persistent.c static int area_io(struct pstore *ps, int op, int op_flags) ps 290 drivers/md/dm-snap-persistent.c chunk = area_location(ps, ps->current_area); ps 292 drivers/md/dm-snap-persistent.c r = chunk_io(ps, ps->area, chunk, op, op_flags, 0); ps 299 drivers/md/dm-snap-persistent.c static void zero_memory_area(struct pstore *ps) ps 301 drivers/md/dm-snap-persistent.c memset(ps->area, 0, ps->store->chunk_size << SECTOR_SHIFT); ps 304 drivers/md/dm-snap-persistent.c static int zero_disk_area(struct pstore *ps, chunk_t area) ps 306 drivers/md/dm-snap-persistent.c return chunk_io(ps, ps->zero_area, area_location(ps, area), ps 310 drivers/md/dm-snap-persistent.c static int read_header(struct pstore *ps, int *new_snapshot) ps 322 drivers/md/dm-snap-persistent.c if (!ps->store->chunk_size) { ps 323 drivers/md/dm-snap-persistent.c ps->store->chunk_size = max(DM_CHUNK_SIZE_DEFAULT_SECTORS, ps 324 drivers/md/dm-snap-persistent.c bdev_logical_block_size(dm_snap_cow(ps->store->snap)-> ps 326 drivers/md/dm-snap-persistent.c ps->store->chunk_mask = ps->store->chunk_size - 1; ps 327 drivers/md/dm-snap-persistent.c ps->store->chunk_shift = __ffs(ps->store->chunk_size); ps 331 drivers/md/dm-snap-persistent.c ps->io_client = dm_io_client_create(); ps 332 drivers/md/dm-snap-persistent.c if 
(IS_ERR(ps->io_client)) ps 333 drivers/md/dm-snap-persistent.c return PTR_ERR(ps->io_client); ps 335 drivers/md/dm-snap-persistent.c r = alloc_area(ps); ps 339 drivers/md/dm-snap-persistent.c r = chunk_io(ps, ps->header_area, 0, REQ_OP_READ, 0, 1); ps 343 drivers/md/dm-snap-persistent.c dh = ps->header_area; ps 357 drivers/md/dm-snap-persistent.c ps->valid = le32_to_cpu(dh->valid); ps 358 drivers/md/dm-snap-persistent.c ps->version = le32_to_cpu(dh->version); ps 361 drivers/md/dm-snap-persistent.c if (ps->store->chunk_size == chunk_size) ps 367 drivers/md/dm-snap-persistent.c chunk_size, ps->store->chunk_size); ps 370 drivers/md/dm-snap-persistent.c free_area(ps); ps 372 drivers/md/dm-snap-persistent.c r = dm_exception_store_set_chunk_size(ps->store, chunk_size, ps 380 drivers/md/dm-snap-persistent.c r = alloc_area(ps); ps 384 drivers/md/dm-snap-persistent.c free_area(ps); ps 388 drivers/md/dm-snap-persistent.c static int write_header(struct pstore *ps) ps 392 drivers/md/dm-snap-persistent.c memset(ps->header_area, 0, ps->store->chunk_size << SECTOR_SHIFT); ps 394 drivers/md/dm-snap-persistent.c dh = ps->header_area; ps 396 drivers/md/dm-snap-persistent.c dh->valid = cpu_to_le32(ps->valid); ps 397 drivers/md/dm-snap-persistent.c dh->version = cpu_to_le32(ps->version); ps 398 drivers/md/dm-snap-persistent.c dh->chunk_size = cpu_to_le32(ps->store->chunk_size); ps 400 drivers/md/dm-snap-persistent.c return chunk_io(ps, ps->header_area, 0, REQ_OP_WRITE, 0, 1); ps 406 drivers/md/dm-snap-persistent.c static struct disk_exception *get_exception(struct pstore *ps, void *ps_area, ps 409 drivers/md/dm-snap-persistent.c BUG_ON(index >= ps->exceptions_per_area); ps 414 drivers/md/dm-snap-persistent.c static void read_exception(struct pstore *ps, void *ps_area, ps 417 drivers/md/dm-snap-persistent.c struct disk_exception *de = get_exception(ps, ps_area, index); ps 424 drivers/md/dm-snap-persistent.c static void write_exception(struct pstore *ps, ps 427 drivers/md/dm-snap-persistent.c struct disk_exception *de = get_exception(ps, ps->area, index); ps 434 drivers/md/dm-snap-persistent.c static void clear_exception(struct pstore *ps, uint32_t index) ps 436 drivers/md/dm-snap-persistent.c struct disk_exception *de = get_exception(ps, ps->area, index); ps 448 drivers/md/dm-snap-persistent.c static int insert_exceptions(struct pstore *ps, void *ps_area, ps 461 drivers/md/dm-snap-persistent.c for (i = 0; i < ps->exceptions_per_area; i++) { ps 462 drivers/md/dm-snap-persistent.c read_exception(ps, ps_area, i, &e); ps 471 drivers/md/dm-snap-persistent.c ps->current_committed = i; ps 479 drivers/md/dm-snap-persistent.c if (ps->next_free <= e.new_chunk) ps 480 drivers/md/dm-snap-persistent.c ps->next_free = e.new_chunk + 1; ps 493 drivers/md/dm-snap-persistent.c static int read_exceptions(struct pstore *ps, ps 502 drivers/md/dm-snap-persistent.c client = dm_bufio_client_create(dm_snap_cow(ps->store->snap)->bdev, ps 503 drivers/md/dm-snap-persistent.c ps->store->chunk_size << SECTOR_SHIFT, ps 518 drivers/md/dm-snap-persistent.c for (ps->current_area = 0; full; ps->current_area++) { ps 523 drivers/md/dm-snap-persistent.c if (unlikely(prefetch_area < ps->current_area)) ps 524 drivers/md/dm-snap-persistent.c prefetch_area = ps->current_area; ps 527 drivers/md/dm-snap-persistent.c chunk_t pf_chunk = area_location(ps, prefetch_area); ps 534 drivers/md/dm-snap-persistent.c } while (prefetch_area <= ps->current_area + DM_PREFETCH_CHUNKS); ps 536 drivers/md/dm-snap-persistent.c chunk = area_location(ps, ps->current_area); 
ps 544 drivers/md/dm-snap-persistent.c r = insert_exceptions(ps, area, callback, callback_context, ps 548 drivers/md/dm-snap-persistent.c memcpy(ps->area, area, ps->store->chunk_size << SECTOR_SHIFT); ps 558 drivers/md/dm-snap-persistent.c ps->current_area--; ps 560 drivers/md/dm-snap-persistent.c skip_metadata(ps); ps 580 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 582 drivers/md/dm-snap-persistent.c *sectors_allocated = ps->next_free * store->chunk_size; ps 590 drivers/md/dm-snap-persistent.c *metadata_sectors = (ps->current_area + 1 + NUM_SNAPSHOT_HDR_CHUNKS) * ps 596 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 598 drivers/md/dm-snap-persistent.c destroy_workqueue(ps->metadata_wq); ps 601 drivers/md/dm-snap-persistent.c if (ps->io_client) ps 602 drivers/md/dm-snap-persistent.c dm_io_client_destroy(ps->io_client); ps 603 drivers/md/dm-snap-persistent.c free_area(ps); ps 606 drivers/md/dm-snap-persistent.c vfree(ps->callbacks); ps 608 drivers/md/dm-snap-persistent.c kfree(ps); ps 617 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 622 drivers/md/dm-snap-persistent.c r = read_header(ps, &new_snapshot); ps 629 drivers/md/dm-snap-persistent.c ps->exceptions_per_area = (ps->store->chunk_size << SECTOR_SHIFT) / ps 631 drivers/md/dm-snap-persistent.c ps->callbacks = dm_vcalloc(ps->exceptions_per_area, ps 632 drivers/md/dm-snap-persistent.c sizeof(*ps->callbacks)); ps 633 drivers/md/dm-snap-persistent.c if (!ps->callbacks) ps 640 drivers/md/dm-snap-persistent.c r = write_header(ps); ps 646 drivers/md/dm-snap-persistent.c ps->current_area = 0; ps 647 drivers/md/dm-snap-persistent.c zero_memory_area(ps); ps 648 drivers/md/dm-snap-persistent.c r = zero_disk_area(ps, 0); ps 656 drivers/md/dm-snap-persistent.c if (ps->version != SNAPSHOT_DISK_VERSION) { ps 658 drivers/md/dm-snap-persistent.c ps->version); ps 665 drivers/md/dm-snap-persistent.c if (!ps->valid) ps 671 drivers/md/dm-snap-persistent.c r = read_exceptions(ps, callback, callback_context); ps 679 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 683 drivers/md/dm-snap-persistent.c if (size < ((ps->next_free + 1) * store->chunk_size)) ps 686 drivers/md/dm-snap-persistent.c e->new_chunk = ps->next_free; ps 692 drivers/md/dm-snap-persistent.c ps->next_free++; ps 693 drivers/md/dm-snap-persistent.c skip_metadata(ps); ps 695 drivers/md/dm-snap-persistent.c atomic_inc(&ps->pending_count); ps 705 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 710 drivers/md/dm-snap-persistent.c ps->valid = 0; ps 714 drivers/md/dm-snap-persistent.c write_exception(ps, ps->current_committed++, &ce); ps 722 drivers/md/dm-snap-persistent.c cb = ps->callbacks + ps->callback_count++; ps 730 drivers/md/dm-snap-persistent.c if (!atomic_dec_and_test(&ps->pending_count) && ps 731 drivers/md/dm-snap-persistent.c (ps->current_committed != ps->exceptions_per_area)) ps 737 drivers/md/dm-snap-persistent.c if ((ps->current_committed == ps->exceptions_per_area) && ps 738 drivers/md/dm-snap-persistent.c zero_disk_area(ps, ps->current_area + 1)) ps 739 drivers/md/dm-snap-persistent.c ps->valid = 0; ps 744 drivers/md/dm-snap-persistent.c if (ps->valid && area_io(ps, REQ_OP_WRITE, ps 746 drivers/md/dm-snap-persistent.c ps->valid = 0; ps 751 drivers/md/dm-snap-persistent.c if (ps->current_committed == ps->exceptions_per_area) { ps 752 drivers/md/dm-snap-persistent.c ps->current_committed = 0; ps 753 drivers/md/dm-snap-persistent.c ps->current_area++; ps 754 
drivers/md/dm-snap-persistent.c zero_memory_area(ps); ps 757 drivers/md/dm-snap-persistent.c for (i = 0; i < ps->callback_count; i++) { ps 758 drivers/md/dm-snap-persistent.c cb = ps->callbacks + i; ps 759 drivers/md/dm-snap-persistent.c cb->callback(cb->context, ps->valid); ps 762 drivers/md/dm-snap-persistent.c ps->callback_count = 0; ps 769 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 777 drivers/md/dm-snap-persistent.c if (!ps->current_committed) { ps 781 drivers/md/dm-snap-persistent.c if (!ps->current_area) ps 784 drivers/md/dm-snap-persistent.c ps->current_area--; ps 785 drivers/md/dm-snap-persistent.c r = area_io(ps, REQ_OP_READ, 0); ps 788 drivers/md/dm-snap-persistent.c ps->current_committed = ps->exceptions_per_area; ps 791 drivers/md/dm-snap-persistent.c read_exception(ps, ps->area, ps->current_committed - 1, &ce); ps 799 drivers/md/dm-snap-persistent.c for (nr_consecutive = 1; nr_consecutive < ps->current_committed; ps 801 drivers/md/dm-snap-persistent.c read_exception(ps, ps->area, ps 802 drivers/md/dm-snap-persistent.c ps->current_committed - 1 - nr_consecutive, &ce); ps 815 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 817 drivers/md/dm-snap-persistent.c BUG_ON(nr_merged > ps->current_committed); ps 820 drivers/md/dm-snap-persistent.c clear_exception(ps, ps->current_committed - 1 - i); ps 822 drivers/md/dm-snap-persistent.c r = area_io(ps, REQ_OP_WRITE, REQ_PREFLUSH | REQ_FUA); ps 826 drivers/md/dm-snap-persistent.c ps->current_committed -= nr_merged; ps 838 drivers/md/dm-snap-persistent.c ps->next_free = area_location(ps, ps->current_area) + ps 839 drivers/md/dm-snap-persistent.c ps->current_committed + 1; ps 846 drivers/md/dm-snap-persistent.c struct pstore *ps = get_info(store); ps 848 drivers/md/dm-snap-persistent.c ps->valid = 0; ps 849 drivers/md/dm-snap-persistent.c if (write_header(ps)) ps 855 drivers/md/dm-snap-persistent.c struct pstore *ps; ps 859 drivers/md/dm-snap-persistent.c ps = kzalloc(sizeof(*ps), GFP_KERNEL); ps 860 drivers/md/dm-snap-persistent.c if (!ps) ps 863 drivers/md/dm-snap-persistent.c ps->store = store; ps 864 drivers/md/dm-snap-persistent.c ps->valid = 1; ps 865 drivers/md/dm-snap-persistent.c ps->version = SNAPSHOT_DISK_VERSION; ps 866 drivers/md/dm-snap-persistent.c ps->area = NULL; ps 867 drivers/md/dm-snap-persistent.c ps->zero_area = NULL; ps 868 drivers/md/dm-snap-persistent.c ps->header_area = NULL; ps 869 drivers/md/dm-snap-persistent.c ps->next_free = NUM_SNAPSHOT_HDR_CHUNKS + 1; /* header and 1st area */ ps 870 drivers/md/dm-snap-persistent.c ps->current_committed = 0; ps 872 drivers/md/dm-snap-persistent.c ps->callback_count = 0; ps 873 drivers/md/dm-snap-persistent.c atomic_set(&ps->pending_count, 0); ps 874 drivers/md/dm-snap-persistent.c ps->callbacks = NULL; ps 876 drivers/md/dm-snap-persistent.c ps->metadata_wq = alloc_workqueue("ksnaphd", WQ_MEM_RECLAIM, 0); ps 877 drivers/md/dm-snap-persistent.c if (!ps->metadata_wq) { ps 894 drivers/md/dm-snap-persistent.c store->context = ps; ps 899 drivers/md/dm-snap-persistent.c destroy_workqueue(ps->metadata_wq); ps 901 drivers/md/dm-snap-persistent.c kfree(ps); ps 206 drivers/media/platform/omap/omap_vout.c int ps = 2, line_length = 0; ps 217 drivers/media/platform/omap/omap_vout.c ps = 2; ps 219 drivers/media/platform/omap/omap_vout.c ps = 4; ps 221 drivers/media/platform/omap/omap_vout.c ps = 3; ps 223 drivers/media/platform/omap/omap_vout.c vout->ps = ps; ps 225 drivers/media/platform/omap/omap_vout.c *cropped_offset = (line_length 
* ps) * ps 226 drivers/media/platform/omap/omap_vout.c crop->top + crop->left * ps; ps 325 drivers/media/platform/omap/omap_vout_vrfb.c int vr_ps = 1, ps = 2, temp_ps = 2; ps 339 drivers/media/platform/omap/omap_vout_vrfb.c ps = 4; ps 342 drivers/media/platform/omap/omap_vout_vrfb.c ps = 2; /* otherwise the pixel size is 2 byte */ ps 345 drivers/media/platform/omap/omap_vout_vrfb.c ps = 4; ps 347 drivers/media/platform/omap/omap_vout_vrfb.c ps = 3; ps 349 drivers/media/platform/omap/omap_vout_vrfb.c vout->ps = ps; ps 364 drivers/media/platform/omap/omap_vout_vrfb.c temp_ps = ps / vr_ps; ps 371 drivers/media/platform/omap/omap_vout_vrfb.c ((crop->width / (vr_ps)) - 1) * ps); ps 380 drivers/media/platform/omap/omap_vout_vrfb.c *cropped_offset = offset + (line_length * ps * ctop) + ps 381 drivers/media/platform/omap/omap_vout_vrfb.c (cleft / vr_ps) * ps; ps 384 drivers/media/platform/omap/omap_vout_vrfb.c *cropped_offset = offset + (line_length * ps * ctop) + ps 385 drivers/media/platform/omap/omap_vout_vrfb.c (cleft / vr_ps) * ps + (line_length * ps 386 drivers/media/platform/omap/omap_vout_vrfb.c (crop->height - 1) * ps); ps 392 drivers/media/platform/omap/omap_vout_vrfb.c temp_ps = ps / vr_ps; ps 395 drivers/media/platform/omap/omap_vout_vrfb.c temp_ps * crop->left + ctop * ps; ps 398 drivers/media/platform/omap/omap_vout_vrfb.c temp_ps * crop->left + ctop * ps + ps 400 drivers/media/platform/omap/omap_vout_vrfb.c ps); ps 405 drivers/media/platform/omap/omap_vout_vrfb.c *cropped_offset = (line_length * ps) * ps 406 drivers/media/platform/omap/omap_vout_vrfb.c crop->top + (crop->left / vr_ps) * ps; ps 408 drivers/media/platform/omap/omap_vout_vrfb.c *cropped_offset = (line_length * ps) * ps 409 drivers/media/platform/omap/omap_vout_vrfb.c crop->top + (crop->left / vr_ps) * ps + ps 410 drivers/media/platform/omap/omap_vout_vrfb.c (line_length * (crop->height - 1) * ps); ps 414 drivers/media/platform/omap/omap_vout_vrfb.c *cropped_offset = (line_length * ps * crop->top) / ps 415 drivers/media/platform/omap/omap_vout_vrfb.c vr_ps + (crop->left * ps) / vr_ps + ps 416 drivers/media/platform/omap/omap_vout_vrfb.c ((crop->width / vr_ps) - 1) * ps; ps 169 drivers/media/platform/omap/omap_voutdef.h int ps, vr_ps, line_length, first_int, field_id; ps 78 drivers/media/radio/wl128x/fmdrv.h u8 ps[2]; ps 83 drivers/media/radio/wl128x/fmdrv.h u8 ps[2]; ps 190 drivers/misc/bh1770glc.c int ps) ps 192 drivers/misc/bh1770glc.c chip->int_mode_prox = ps; ps 195 drivers/misc/bh1770glc.c (chip->int_mode_lux << 1) | (ps << 0)); ps 253 drivers/misc/bh1770glc.c static inline u8 bh1770_psadjusted_to_raw(struct bh1770_chip *chip, u8 ps) ps 257 drivers/misc/bh1770glc.c raw = (((u32)ps * BH1770_COEF_SCALER) / chip->prox_coef); ps 258 drivers/misc/sgi-gru/grufault.c int ret, ps; ps 269 drivers/misc/sgi-gru/grufault.c ret = atomic_pte_lookup(vma, vaddr, write, &paddr, &ps); ps 273 drivers/misc/sgi-gru/grufault.c if (non_atomic_pte_lookup(vma, vaddr, write, &paddr, &ps)) ps 278 drivers/misc/sgi-gru/grufault.c paddr = paddr & ~((1UL << ps) - 1); ps 280 drivers/misc/sgi-gru/grufault.c *pageshift = ps; ps 1406 drivers/mmc/host/sdhci-pci-core.c int tm, ps; ps 1422 drivers/mmc/host/sdhci-pci-core.c ps = readw(slot->host->ioaddr + SYSKT_POWER_STATUS); ps 1423 drivers/mmc/host/sdhci-pci-core.c if (ps & SYSKT_POWER_STATUS_OK) ps 453 drivers/mtd/nand/raw/marvell_nand.c #define TO_CYCLES(ps, period_ns) (DIV_ROUND_UP(ps / 1000, period_ns)) ps 454 drivers/mtd/nand/raw/marvell_nand.c #define TO_CYCLES64(ps, period_ns) 
(DIV_ROUND_UP_ULL(div_u64(ps, 1000), \ ps 524 drivers/mtd/nand/raw/mpc5121_nfc.c uint romloc, ps; ps 547 drivers/mtd/nand/raw/mpc5121_nfc.c ps = (rcwh >> 7) & 0x1; ps 553 drivers/mtd/nand/raw/mpc5121_nfc.c switch ((ps << 2) | romloc) { ps 474 drivers/mtd/nand/raw/tango_nand.c static u32 to_ticks(int kHz, int ps) ps 476 drivers/mtd/nand/raw/tango_nand.c return DIV_ROUND_UP_ULL((u64)kHz * ps, NSEC_PER_SEC); ps 25 drivers/mtd/ubi/debug.h #define ubi_dbg_print_hex_dump(l, ps, pt, r, g, b, len, a) \ ps 26 drivers/mtd/ubi/debug.h print_hex_dump(l, ps, pt, r, g, b, len, a) ps 73 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 77 drivers/net/dsa/dsa_loop.c memcpy(ps->ports[i].mib, dsa_loop_mibs, ps 96 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 104 drivers/net/dsa/dsa_loop.c ps->ports[port].mib[i].name, ETH_GSTRING_LEN); ps 110 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 114 drivers/net/dsa/dsa_loop.c data[i] = ps->ports[port].mib[i].val; ps 119 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 120 drivers/net/dsa/dsa_loop.c struct mii_bus *bus = ps->bus; ps 123 drivers/net/dsa/dsa_loop.c ret = mdiobus_read_nested(bus, ps->port_base + port, regnum); ps 125 drivers/net/dsa/dsa_loop.c ps->ports[port].mib[DSA_LOOP_PHY_READ_ERR].val++; ps 127 drivers/net/dsa/dsa_loop.c ps->ports[port].mib[DSA_LOOP_PHY_READ_OK].val++; ps 135 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 136 drivers/net/dsa/dsa_loop.c struct mii_bus *bus = ps->bus; ps 139 drivers/net/dsa/dsa_loop.c ret = mdiobus_write_nested(bus, ps->port_base + port, regnum, value); ps 141 drivers/net/dsa/dsa_loop.c ps->ports[port].mib[DSA_LOOP_PHY_WRITE_ERR].val++; ps 143 drivers/net/dsa/dsa_loop.c ps->ports[port].mib[DSA_LOOP_PHY_WRITE_OK].val++; ps 184 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 185 drivers/net/dsa/dsa_loop.c struct mii_bus *bus = ps->bus; ps 191 drivers/net/dsa/dsa_loop.c mdiobus_read(bus, ps->port_base + port, MII_BMSR); ps 204 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 205 drivers/net/dsa/dsa_loop.c struct mii_bus *bus = ps->bus; ps 210 drivers/net/dsa/dsa_loop.c mdiobus_read(bus, ps->port_base + port, MII_BMSR); ps 213 drivers/net/dsa/dsa_loop.c vl = &ps->vlans[vid]; ps 226 drivers/net/dsa/dsa_loop.c ps->pvid = vid; ps 233 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 234 drivers/net/dsa/dsa_loop.c struct mii_bus *bus = ps->bus; ps 236 drivers/net/dsa/dsa_loop.c u16 vid, pvid = ps->pvid; ps 239 drivers/net/dsa/dsa_loop.c mdiobus_read(bus, ps->port_base + port, MII_BMSR); ps 242 drivers/net/dsa/dsa_loop.c vl = &ps->vlans[vid]; ps 254 drivers/net/dsa/dsa_loop.c ps->pvid = pvid; ps 280 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps; ps 293 drivers/net/dsa/dsa_loop.c ps = devm_kzalloc(&mdiodev->dev, sizeof(*ps), GFP_KERNEL); ps 294 drivers/net/dsa/dsa_loop.c if (!ps) ps 297 drivers/net/dsa/dsa_loop.c ps->netdev = dev_get_by_name(&init_net, pdata->netdev); ps 298 drivers/net/dsa/dsa_loop.c if (!ps->netdev) ps 301 drivers/net/dsa/dsa_loop.c pdata->cd.netdev[DSA_LOOP_CPU_PORT] = &ps->netdev->dev; ps 305 drivers/net/dsa/dsa_loop.c ds->priv = ps; ps 306 drivers/net/dsa/dsa_loop.c ps->bus = mdiodev->bus; ps 316 drivers/net/dsa/dsa_loop.c struct dsa_loop_priv *ps = ds->priv; ps 319 drivers/net/dsa/dsa_loop.c dev_put(ps->netdev); ps 95 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps = &chip->port_hwtstamp[port]; ps 101 
drivers/net/dsa/mv88e6xxx/hwtstamp.c clear_bit_unlock(MV88E6XXX_HWTSTAMP_ENABLED, &ps->state); ps 170 drivers/net/dsa/mv88e6xxx/hwtstamp.c set_bit(MV88E6XXX_HWTSTAMP_ENABLED, &ps->state); ps 179 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps = &chip->port_hwtstamp[port]; ps 194 drivers/net/dsa/mv88e6xxx/hwtstamp.c memcpy(&ps->tstamp_config, &config, sizeof(config)); ps 204 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps = &chip->port_hwtstamp[port]; ps 205 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct hwtstamp_config *config = &ps->tstamp_config; ps 250 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps = &chip->port_hwtstamp[port]; ps 260 drivers/net/dsa/mv88e6xxx/hwtstamp.c if (!test_bit(MV88E6XXX_HWTSTAMP_ENABLED, &ps->state)) ps 287 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps, ps 305 drivers/net/dsa/mv88e6xxx/hwtstamp.c err = mv88e6xxx_port_ptp_read(chip, ps->port_id, ps 318 drivers/net/dsa/mv88e6xxx/hwtstamp.c err = mv88e6xxx_port_ptp_write(chip, ps->port_id, reg, 0); ps 343 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps) ps 348 drivers/net/dsa/mv88e6xxx/hwtstamp.c skb = skb_dequeue(&ps->rx_queue); ps 351 drivers/net/dsa/mv88e6xxx/hwtstamp.c mv88e6xxx_get_rxts(chip, ps, skb, ptp_ops->arr0_sts_reg, ps 352 drivers/net/dsa/mv88e6xxx/hwtstamp.c &ps->rx_queue); ps 354 drivers/net/dsa/mv88e6xxx/hwtstamp.c skb = skb_dequeue(&ps->rx_queue2); ps 356 drivers/net/dsa/mv88e6xxx/hwtstamp.c mv88e6xxx_get_rxts(chip, ps, skb, ptp_ops->arr1_sts_reg, ps 357 drivers/net/dsa/mv88e6xxx/hwtstamp.c &ps->rx_queue2); ps 368 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps; ps 373 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps = &chip->port_hwtstamp[port]; ps 375 drivers/net/dsa/mv88e6xxx/hwtstamp.c if (ps->tstamp_config.rx_filter != HWTSTAMP_FILTER_PTP_V2_EVENT) ps 385 drivers/net/dsa/mv88e6xxx/hwtstamp.c skb_queue_tail(&ps->rx_queue2, skb); ps 387 drivers/net/dsa/mv88e6xxx/hwtstamp.c skb_queue_tail(&ps->rx_queue, skb); ps 395 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps) ps 405 drivers/net/dsa/mv88e6xxx/hwtstamp.c if (!ps->tx_skb) ps 409 drivers/net/dsa/mv88e6xxx/hwtstamp.c err = mv88e6xxx_port_ptp_read(chip, ps->port_id, ps 419 drivers/net/dsa/mv88e6xxx/hwtstamp.c if (time_is_before_jiffies(ps->tx_tstamp_start + ps 422 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->port_id); ps 434 drivers/net/dsa/mv88e6xxx/hwtstamp.c mv88e6xxx_port_ptp_write(chip, ps->port_id, ptp_ops->dep_sts_reg, 0); ps 439 drivers/net/dsa/mv88e6xxx/hwtstamp.c dev_warn(chip->dev, "p%d: tx timestamp overrun\n", ps->port_id); ps 443 drivers/net/dsa/mv88e6xxx/hwtstamp.c if (departure_block[3] != ps->tx_seq_id) { ps 444 drivers/net/dsa/mv88e6xxx/hwtstamp.c dev_warn(chip->dev, "p%d: unexpected seq. 
id\n", ps->port_id); ps 457 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->port_id, ktime_to_ns(shhwtstamps.hwtstamp), ps 458 drivers/net/dsa/mv88e6xxx/hwtstamp.c departure_block[0], ps->tx_seq_id, departure_block[3]); ps 465 drivers/net/dsa/mv88e6xxx/hwtstamp.c tmp_skb = ps->tx_skb; ps 466 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->tx_skb = NULL; ps 467 drivers/net/dsa/mv88e6xxx/hwtstamp.c clear_bit_unlock(MV88E6XXX_HWTSTAMP_TX_IN_PROGRESS, &ps->state); ps 473 drivers/net/dsa/mv88e6xxx/hwtstamp.c dev_kfree_skb_any(ps->tx_skb); ps 474 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->tx_skb = NULL; ps 475 drivers/net/dsa/mv88e6xxx/hwtstamp.c clear_bit_unlock(MV88E6XXX_HWTSTAMP_TX_IN_PROGRESS, &ps->state); ps 484 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps; ps 491 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps = &chip->port_hwtstamp[i]; ps 492 drivers/net/dsa/mv88e6xxx/hwtstamp.c if (test_bit(MV88E6XXX_HWTSTAMP_TX_IN_PROGRESS, &ps->state)) ps 493 drivers/net/dsa/mv88e6xxx/hwtstamp.c restart |= mv88e6xxx_txtstamp_work(chip, ps); ps 495 drivers/net/dsa/mv88e6xxx/hwtstamp.c mv88e6xxx_rxtstamp_work(chip, ps); ps 505 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps = &chip->port_hwtstamp[port]; ps 519 drivers/net/dsa/mv88e6xxx/hwtstamp.c &ps->state)) ps 522 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->tx_skb = clone; ps 523 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->tx_tstamp_start = jiffies; ps 524 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->tx_seq_id = be16_to_cpup(seq_ptr); ps 572 drivers/net/dsa/mv88e6xxx/hwtstamp.c struct mv88e6xxx_port_hwtstamp *ps = &chip->port_hwtstamp[port]; ps 574 drivers/net/dsa/mv88e6xxx/hwtstamp.c ps->port_id = port; ps 576 drivers/net/dsa/mv88e6xxx/hwtstamp.c skb_queue_head_init(&ps->rx_queue); ps 577 drivers/net/dsa/mv88e6xxx/hwtstamp.c skb_queue_head_init(&ps->rx_queue2); ps 749 drivers/net/ethernet/amd/au1000_eth.c struct net_device_stats *ps = &dev->stats; ps 751 drivers/net/ethernet/amd/au1000_eth.c ps->rx_packets++; ps 753 drivers/net/ethernet/amd/au1000_eth.c ps->multicast++; ps 756 drivers/net/ethernet/amd/au1000_eth.c ps->rx_errors++; ps 758 drivers/net/ethernet/amd/au1000_eth.c ps->rx_missed_errors++; ps 760 drivers/net/ethernet/amd/au1000_eth.c ps->rx_length_errors++; ps 762 drivers/net/ethernet/amd/au1000_eth.c ps->rx_crc_errors++; ps 764 drivers/net/ethernet/amd/au1000_eth.c ps->collisions++; ps 766 drivers/net/ethernet/amd/au1000_eth.c ps->rx_bytes += status & RX_FRAME_LEN_MASK; ps 843 drivers/net/ethernet/amd/au1000_eth.c struct net_device_stats *ps = &dev->stats; ps 851 drivers/net/ethernet/amd/au1000_eth.c ps->tx_errors++; ps 852 drivers/net/ethernet/amd/au1000_eth.c ps->tx_aborted_errors++; ps 855 drivers/net/ethernet/amd/au1000_eth.c ps->tx_errors++; ps 856 drivers/net/ethernet/amd/au1000_eth.c ps->tx_aborted_errors++; ps 858 drivers/net/ethernet/amd/au1000_eth.c ps->tx_carrier_errors++; ps 966 drivers/net/ethernet/amd/au1000_eth.c struct net_device_stats *ps = &dev->stats; ps 1003 drivers/net/ethernet/amd/au1000_eth.c ps->tx_packets++; ps 1004 drivers/net/ethernet/amd/au1000_eth.c ps->tx_bytes += ptxd->len; ps 2495 drivers/net/ethernet/marvell/mv643xx_eth.c u32 ps = rdlp(mp, PORT_STATUS); ps 2497 drivers/net/ethernet/marvell/mv643xx_eth.c if ((ps & (TX_IN_PROGRESS | TX_FIFO_EMPTY)) == TX_FIFO_EMPTY) ps 193 drivers/net/ethernet/mellanox/mlx5/core/port.c enum mlx5_port_status ps; ps 195 drivers/net/ethernet/mellanox/mlx5/core/port.c mlx5_query_port_admin_status(dev, &ps); ps 197 drivers/net/ethernet/mellanox/mlx5/core/port.c if 
(ps == MLX5_PORT_UP) ps 451 drivers/net/ethernet/stmicro/stmmac/common.h unsigned int ps; ps 47 drivers/net/ethernet/stmicro/stmmac/dwmac1000_core.c if (hw->ps) { ps 51 drivers/net/ethernet/stmicro/stmmac/dwmac1000_core.c switch (hw->ps) { ps 31 drivers/net/ethernet/stmicro/stmmac/dwmac4_core.c if (hw->ps) { ps 35 drivers/net/ethernet/stmicro/stmmac/dwmac4_core.c switch (hw->ps) { ps 26 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c if (hw->ps) { ps 30 drivers/net/ethernet/stmicro/stmmac/dwxgmac2_core.c switch (hw->ps) { ps 381 drivers/net/ethernet/stmicro/stmmac/stmmac_ethtool.c stmmac_pcs_ctrl_ane(priv, priv->ioaddr, 1, priv->hw->ps, 0); ps 2551 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c priv->hw->ps = speed; ps 2554 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c priv->hw->ps = 0; ps 2603 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c stmmac_pcs_ctrl_ane(priv, priv->ioaddr, 1, priv->hw->ps, 0); ps 548 drivers/net/wireless/ath/ath10k/core.h bool ps; ps 1921 drivers/net/wireless/ath/ath10k/mac.c enable_ps = arvif->ps; ps 5827 drivers/net/wireless/ath/ath10k/mac.c arvif->ps = vif->bss_conf.ps; ps 452 drivers/net/wireless/ath/carl9170/carl9170.h } ps; ps 202 drivers/net/wireless/ath/carl9170/cmd.c int carl9170_powersave(struct ar9170 *ar, const bool ps) ps 212 drivers/net/wireless/ath/carl9170/cmd.c if (ps) { ps 385 drivers/net/wireless/ath/carl9170/debug.c ADD(buf, *len, bufsize, "psm state: %s\n", (ar->ps.off_override ? ps 386 drivers/net/wireless/ath/carl9170/debug.c "FORCE CAM" : (ar->ps.state ? "PSM" : "CAM"))); ps 388 drivers/net/wireless/ath/carl9170/debug.c ADD(buf, *len, bufsize, "sleep duration: %d ms.\n", ar->ps.sleep_ms); ps 390 drivers/net/wireless/ath/carl9170/debug.c jiffies_to_msecs(jiffies - ar->ps.last_action)); ps 392 drivers/net/wireless/ath/carl9170/debug.c jiffies_to_msecs(jiffies - ar->ps.last_slept)); ps 357 drivers/net/wireless/ath/carl9170/main.c ar->ps.last_action = jiffies; ps 358 drivers/net/wireless/ath/carl9170/main.c ar->ps.last_slept = jiffies; ps 756 drivers/net/wireless/ath/carl9170/main.c ar->ps.off_override |= PS_OFF_VIF; ps 818 drivers/net/wireless/ath/carl9170/main.c ar->ps.off_override &= ~PS_OFF_VIF; ps 834 drivers/net/wireless/ath/carl9170/main.c bool ps = false; ps 837 drivers/net/wireless/ath/carl9170/main.c if (!ar->ps.off_override) ps 838 drivers/net/wireless/ath/carl9170/main.c ps = (ar->hw->conf.flags & IEEE80211_CONF_PS); ps 840 drivers/net/wireless/ath/carl9170/main.c if (ps != ar->ps.state) { ps 841 drivers/net/wireless/ath/carl9170/main.c err = carl9170_powersave(ar, ps); ps 845 drivers/net/wireless/ath/carl9170/main.c if (ar->ps.state && !ps) { ps 846 drivers/net/wireless/ath/carl9170/main.c ar->ps.sleep_ms = jiffies_to_msecs(jiffies - ps 847 drivers/net/wireless/ath/carl9170/main.c ar->ps.last_action); ps 850 drivers/net/wireless/ath/carl9170/main.c if (ps) ps 851 drivers/net/wireless/ath/carl9170/main.c ar->ps.last_slept = jiffies; ps 853 drivers/net/wireless/ath/carl9170/main.c ar->ps.last_action = jiffies; ps 854 drivers/net/wireless/ath/carl9170/main.c ar->ps.state = ps; ps 78 drivers/net/wireless/ath/carl9170/rx.c u32 ps; ps 81 drivers/net/wireless/ath/carl9170/rx.c ps = le32_to_cpu(rsp->psm.state); ps 83 drivers/net/wireless/ath/carl9170/rx.c new_ps = (ps & CARL9170_PSM_COUNTER) != CARL9170_PSM_WAKE; ps 84 drivers/net/wireless/ath/carl9170/rx.c if (ar->ps.state != new_ps) { ps 86 drivers/net/wireless/ath/carl9170/rx.c ar->ps.sleep_ms = jiffies_to_msecs(jiffies - ps 87 drivers/net/wireless/ath/carl9170/rx.c 
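
The carl9170 main.c entries above compute the desired power-save state as "mac80211 asked for PS and no off_override bit is set", and account the time spent asleep whenever the state flips back to CAM. A small standalone sketch of that bookkeeping; the field names and the millisecond time source are illustrative:

#include <stdbool.h>
#include <stdio.h>

struct ps_state {
	bool state;			/* currently in power save (PSM)? */
	unsigned int off_override;	/* nonzero forces CAM */
	unsigned long last_action;	/* when the state last changed */
	unsigned long sleep_ms;		/* length of the last sleep period */
};

static void ps_update(struct ps_state *ps, bool conf_ps, unsigned long now_ms)
{
	bool want = !ps->off_override && conf_ps;

	if (want == ps->state)
		return;

	if (ps->state && !want)			/* waking up: account sleep time */
		ps->sleep_ms = now_ms - ps->last_action;

	ps->last_action = now_ms;
	ps->state = want;
}

int main(void)
{
	struct ps_state ps = { 0 };

	ps_update(&ps, true, 1000);		/* enter PSM at t = 1000 ms */
	ps_update(&ps, false, 1350);		/* back to CAM at t = 1350 ms */
	printf("slept for %lu ms\n", ps.sleep_ms);	/* 350 */
	return 0;
}
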
ar->ps.last_action); ps 90 drivers/net/wireless/ath/carl9170/rx.c ar->ps.last_action = jiffies; ps 92 drivers/net/wireless/ath/carl9170/rx.c ar->ps.state = new_ps; ps 539 drivers/net/wireless/ath/carl9170/rx.c ar->ps.last_beacon = jiffies; ps 552 drivers/net/wireless/ath/carl9170/rx.c ar->ps.dtim_counter = (tim_ie->dtim_count - 1) % ps 565 drivers/net/wireless/ath/carl9170/rx.c ar->ps.off_override &= ~PS_OFF_BCN; ps 569 drivers/net/wireless/ath/carl9170/rx.c ar->ps.off_override |= PS_OFF_BCN; ps 296 drivers/net/wireless/ath/dfs_pattern_detector.c struct pri_sequence *ps = pd->add_pulse(pd, event); ps 297 drivers/net/wireless/ath/dfs_pattern_detector.c if (ps != NULL) { ps 304 drivers/net/wireless/ath/dfs_pattern_detector.c ps->pri, ps->count, ps->count_falses); ps 102 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence *ps, *ps0; ps 110 drivers/net/wireless/ath/dfs_pri_detector.c list_for_each_entry_safe(ps, ps0, &pseq_pool, head) { ps 111 drivers/net/wireless/ath/dfs_pri_detector.c list_del(&ps->head); ps 113 drivers/net/wireless/ath/dfs_pri_detector.c kfree(ps); ps 230 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence ps, *new_ps; ps 245 drivers/net/wireless/ath/dfs_pri_detector.c ps.count = 2; ps 246 drivers/net/wireless/ath/dfs_pri_detector.c ps.count_falses = 0; ps 247 drivers/net/wireless/ath/dfs_pri_detector.c ps.first_ts = p->ts; ps 248 drivers/net/wireless/ath/dfs_pri_detector.c ps.last_ts = ts; ps 249 drivers/net/wireless/ath/dfs_pri_detector.c ps.pri = GET_PRI_TO_USE(pde->rs->pri_min, ps 251 drivers/net/wireless/ath/dfs_pri_detector.c ps.dur = ps.pri * (pde->rs->ppb - 1) ps 256 drivers/net/wireless/ath/dfs_pri_detector.c min_valid_ts = ts - ps.dur; ps 264 drivers/net/wireless/ath/dfs_pri_detector.c factor = pde_get_multiple(ps.last_ts - p2->ts, ps.pri, ps 267 drivers/net/wireless/ath/dfs_pri_detector.c ps.count++; ps 268 drivers/net/wireless/ath/dfs_pri_detector.c ps.first_ts = p2->ts; ps 273 drivers/net/wireless/ath/dfs_pri_detector.c ps.count_falses += tmp_false_count; ps 280 drivers/net/wireless/ath/dfs_pri_detector.c if (ps.count <= min_count) ps 285 drivers/net/wireless/ath/dfs_pri_detector.c ps.deadline_ts = ps.first_ts + ps.dur; ps 296 drivers/net/wireless/ath/dfs_pri_detector.c memcpy(new_ps, &ps, sizeof(ps)); ps 308 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence *ps, *ps2; ps 309 drivers/net/wireless/ath/dfs_pri_detector.c list_for_each_entry_safe(ps, ps2, &pde->sequences, head) { ps 314 drivers/net/wireless/ath/dfs_pri_detector.c if (ts > ps->deadline_ts) { ps 315 drivers/net/wireless/ath/dfs_pri_detector.c list_del_init(&ps->head); ps 316 drivers/net/wireless/ath/dfs_pri_detector.c pool_put_pseq_elem(ps); ps 320 drivers/net/wireless/ath/dfs_pri_detector.c delta_ts = ts - ps->last_ts; ps 321 drivers/net/wireless/ath/dfs_pri_detector.c factor = pde_get_multiple(delta_ts, ps->pri, ps 324 drivers/net/wireless/ath/dfs_pri_detector.c ps->last_ts = ts; ps 325 drivers/net/wireless/ath/dfs_pri_detector.c ps->count++; ps 327 drivers/net/wireless/ath/dfs_pri_detector.c if (max_count < ps->count) ps 328 drivers/net/wireless/ath/dfs_pri_detector.c max_count = ps->count; ps 330 drivers/net/wireless/ath/dfs_pri_detector.c ps->count_falses++; ps 339 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence *ps; ps 344 drivers/net/wireless/ath/dfs_pri_detector.c list_for_each_entry(ps, &pde->sequences, head) { ps 350 drivers/net/wireless/ath/dfs_pri_detector.c if ((ps->count >= pde->rs->ppb_thresh) && ps 351 
drivers/net/wireless/ath/dfs_pri_detector.c (ps->count * pde->rs->num_pri >= ps->count_falses)) ps 352 drivers/net/wireless/ath/dfs_pri_detector.c return ps; ps 361 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence *ps, *ps0; ps 363 drivers/net/wireless/ath/dfs_pri_detector.c list_for_each_entry_safe(ps, ps0, &pde->sequences, head) { ps 364 drivers/net/wireless/ath/dfs_pri_detector.c list_del_init(&ps->head); ps 365 drivers/net/wireless/ath/dfs_pri_detector.c pool_put_pseq_elem(ps); ps 386 drivers/net/wireless/ath/dfs_pri_detector.c struct pri_sequence *ps; ps 410 drivers/net/wireless/ath/dfs_pri_detector.c ps = pseq_handler_check_detection(de); ps 412 drivers/net/wireless/ath/dfs_pri_detector.c if (ps == NULL) ps 415 drivers/net/wireless/ath/dfs_pri_detector.c return ps; ps 405 drivers/net/wireless/ath/wcn36xx/main.c if (vif->bss_conf.ps) /* ps allowed ? */ ps 424 drivers/net/wireless/intel/iwlwifi/mvm/power.c if (!vif->bss_conf.ps || !mvmvif->pm_enabled) ps 953 drivers/net/wireless/intel/iwlwifi/mvm/power.c !vif->bss_conf.ps || ps 1904 drivers/net/wireless/intel/iwlwifi/mvm/rs.c !vif->bss_conf.ps); ps 526 drivers/net/wireless/mac80211_hwsim.c } ps; ps 661 drivers/net/wireless/mac80211_hwsim.c struct ieee80211_vif *vif, int ps) ps 672 drivers/net/wireless/mac80211_hwsim.c __func__, vp->bssid, ps); ps 681 drivers/net/wireless/mac80211_hwsim.c (ps ? IEEE80211_FCTL_PM : 0)); ps 711 drivers/net/wireless/mac80211_hwsim.c *val = data->ps; ps 725 drivers/net/wireless/mac80211_hwsim.c if (data->ps != PS_ENABLED) ps 734 drivers/net/wireless/mac80211_hwsim.c old_ps = data->ps; ps 735 drivers/net/wireless/mac80211_hwsim.c data->ps = val; ps 958 drivers/net/wireless/mac80211_hwsim.c switch (data->ps) { ps 1049 drivers/net/wireless/mac80211_hwsim.c if (data->ps != PS_DISABLED) ps 1257 drivers/net/wireless/mac80211_hwsim.c if (data->ps != PS_DISABLED) ps 1932 drivers/net/wireless/mac80211_hwsim.c int err, ps; ps 1946 drivers/net/wireless/mac80211_hwsim.c ps = nla_get_u32(tb[HWSIM_TM_ATTR_PS]); ps 1947 drivers/net/wireless/mac80211_hwsim.c return hwsim_fops_ps_write(hwsim, ps); ps 1953 drivers/net/wireless/mac80211_hwsim.c if (nla_put_u32(skb, HWSIM_TM_ATTR_PS, hwsim->ps)) ps 2330 drivers/net/wireless/mac80211_hwsim.c data[i++] = ar->ps; ps 561 drivers/net/wireless/mediatek/mt76/mac80211.c bool ps; ps 593 drivers/net/wireless/mediatek/mt76/mac80211.c ps = ieee80211_has_pm(hdr->frame_control); ps 595 drivers/net/wireless/mediatek/mt76/mac80211.c if (ps && (ieee80211_is_data_qos(hdr->frame_control) || ps 599 drivers/net/wireless/mediatek/mt76/mac80211.c if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps) ps 602 drivers/net/wireless/mediatek/mt76/mac80211.c if (ps) ps 607 drivers/net/wireless/mediatek/mt76/mac80211.c dev->drv->sta_ps(dev, sta, ps); ps 608 drivers/net/wireless/mediatek/mt76/mac80211.c ieee80211_sta_ps_transition(sta, ps); ps 610 drivers/net/wireless/mediatek/mt76/mac80211.c if (ps) ps 310 drivers/net/wireless/mediatek/mt76/mt76.h bool ps); ps 220 drivers/net/wireless/mediatek/mt76/mt7603/mac.c if (sta->ps == enabled) ps 240 drivers/net/wireless/mediatek/mt76/mt7603/mac.c sta->ps = enabled; ps 328 drivers/net/wireless/mediatek/mt76/mt7603/main.c msta->ps = ~0; ps 377 drivers/net/wireless/mediatek/mt76/mt7603/main.c mt7603_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps) ps 384 drivers/net/wireless/mediatek/mt76/mt7603/main.c mt7603_wtbl_set_ps(dev, msta, ps); ps 385 drivers/net/wireless/mediatek/mt76/mt7603/main.c if (ps) ps 77 
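
The dfs_pri_detector.c entries above extend a candidate pulse sequence when the gap since its last pulse is close to an integer multiple of the sequence's PRI (pde_get_multiple). A standalone sketch of that interval test; the nearest-multiple rounding and tolerance handling are simplified, not the driver's exact code:

#include <stdio.h>

static unsigned int get_multiple(unsigned int delta_ts, unsigned int pri,
				 unsigned int tolerance)
{
	unsigned int factor, remainder;

	if (pri == 0)
		return 0;

	factor = (delta_ts + pri / 2) / pri;		/* nearest multiple */
	if (factor == 0)
		return 0;

	remainder = delta_ts > factor * pri ? delta_ts - factor * pri
					    : factor * pri - delta_ts;
	return remainder <= tolerance ? factor : 0;	/* 0 means "no fit" */
}

int main(void)
{
	/* 3003 us since the last pulse, PRI 1000 us, +/-16 us tolerance */
	printf("factor = %u\n", get_multiple(3003, 1000, 16));	/* 3 */
	printf("factor = %u\n", get_multiple(3500, 1000, 16));	/* 0: no fit */
	return 0;
}
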
drivers/net/wireless/mediatek/mt76/mt7603/mt7603.h u8 ps; ps 238 drivers/net/wireless/mediatek/mt76/mt7603/mt7603.h void mt7603_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps); ps 218 drivers/net/wireless/mediatek/mt76/mt7615/mac.c void mt7615_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps) ps 263 drivers/net/wireless/mediatek/mt76/mt7615/mt7615.h void mt7615_sta_ps(struct mt76_dev *mdev, struct ieee80211_sta *sta, bool ps); ps 182 drivers/net/wireless/mediatek/mt76/mt76x02.h void mt76x02_sta_ps(struct mt76_dev *dev, struct ieee80211_sta *sta, bool ps); ps 620 drivers/net/wireless/mediatek/mt76/mt76x02_util.c bool ps) ps 628 drivers/net/wireless/mediatek/mt76/mt76x02_util.c mt76x02_mac_wcid_set_drop(dev, idx, ps); ps 294 drivers/net/wireless/mediatek/mt76/tx.c mt76_txq_dequeue(struct mt76_dev *dev, struct mt76_txq *mtxq, bool ps) ps 303 drivers/net/wireless/mediatek/mt76/tx.c if (ps && skb_queue_empty(&mtxq->retry_q)) ps 99 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_init_hdp_irqs(struct qtnf_pcie_pearl_state *ps) ps 103 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 104 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_mask = (PCIE_HDP_INT_RX_BITS | PCIE_HDP_INT_TX_BITS); ps 105 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_unlock_irqrestore(&ps->irq_lock, flags); ps 108 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_enable_hdp_irqs(struct qtnf_pcie_pearl_state *ps) ps 112 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 113 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 114 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_unlock_irqrestore(&ps->irq_lock, flags); ps 117 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_disable_hdp_irqs(struct qtnf_pcie_pearl_state *ps) ps 121 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 122 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(0x0, PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 123 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_unlock_irqrestore(&ps->irq_lock, flags); ps 126 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_en_rxdone_irq(struct qtnf_pcie_pearl_state *ps) ps 130 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 131 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_mask |= PCIE_HDP_INT_RX_BITS; ps 132 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 133 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_unlock_irqrestore(&ps->irq_lock, flags); ps 136 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_dis_rxdone_irq(struct qtnf_pcie_pearl_state *ps) ps 140 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 141 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_mask &= ~PCIE_HDP_INT_RX_BITS; ps 142 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 143 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c 
spin_unlock_irqrestore(&ps->irq_lock, flags); ps 146 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_en_txdone_irq(struct qtnf_pcie_pearl_state *ps) ps 150 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 151 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_mask |= PCIE_HDP_INT_TX_BITS; ps 152 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 153 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_unlock_irqrestore(&ps->irq_lock, flags); ps 156 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static inline void qtnf_dis_txdone_irq(struct qtnf_pcie_pearl_state *ps) ps 160 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_irqsave(&ps->irq_lock, flags); ps 161 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_mask &= ~PCIE_HDP_INT_TX_BITS; ps 162 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->pcie_irq_mask, PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 163 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_unlock_irqrestore(&ps->irq_lock, flags); ps 166 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static void qtnf_deassert_intx(struct qtnf_pcie_pearl_state *ps) ps 168 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c void __iomem *reg = ps->base.sysctl_bar + PEARL_PCIE_CFG0_OFFSET; ps 176 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static void qtnf_pearl_reset_ep(struct qtnf_pcie_pearl_state *ps) ps 179 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c void __iomem *reg = ps->base.sysctl_bar + ps 184 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c pci_restore_state(ps->base.pdev); ps 189 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c const struct qtnf_pcie_pearl_state *ps = arg; ps 191 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c void __iomem *reg = ps->base.sysctl_bar + ps 231 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int pearl_alloc_bd_table(struct qtnf_pcie_pearl_state *ps) ps 233 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 249 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->bd_table_vaddr = vaddr; ps 250 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->bd_table_paddr = paddr; ps 251 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->bd_table_len = len; ps 253 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->tx_bd_vbase = vaddr; ps 254 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->tx_bd_pbase = paddr; ps 266 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->rx_bd_vbase = vaddr; ps 267 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->rx_bd_pbase = paddr; ps 271 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_TX_HOST_Q_BASE_H(ps->pcie_reg_base)); ps 274 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_TX_HOST_Q_BASE_L(ps->pcie_reg_base)); ps 276 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_TX_HOST_Q_SZ_CTRL(ps->pcie_reg_base)); ps 283 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int pearl_skb2rbd_attach(struct qtnf_pcie_pearl_state *ps, u16 index) ps 285 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 297 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c rxbd = 
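
The qtnfmac pearl_pcie.c helpers above keep a cached copy of the HDP interrupt-enable mask and rewrite the whole register each time one source is enabled or disabled, under irq_lock. A userspace sketch of the pattern with a fake register; the bit values are stand-ins and the locking is indicated only in comments:

#include <stdio.h>

#define INT_RX_BITS	0x0003u
#define INT_TX_BITS	0x000cu

struct hdp_irq {
	unsigned int mask;		/* cached copy of the enable register */
	volatile unsigned int *int_en;	/* stand-in for PCIE_HDP_INT_EN */
};

static void hdp_en_rxdone(struct hdp_irq *h)
{
	/* real helpers take spin_lock_irqsave(&ps->irq_lock, flags) here */
	h->mask |= INT_RX_BITS;
	*h->int_en = h->mask;
	/* ...and release the lock here */
}

static void hdp_dis_txdone(struct hdp_irq *h)
{
	h->mask &= ~INT_TX_BITS;
	*h->int_en = h->mask;
}

int main(void)
{
	unsigned int fake_reg = 0;
	struct hdp_irq h = { .mask = INT_RX_BITS | INT_TX_BITS, .int_en = &fake_reg };

	hdp_dis_txdone(&h);
	hdp_en_rxdone(&h);
	printf("INT_EN = 0x%04x\n", fake_reg);	/* 0x0003 */
	return 0;
}
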
&ps->rx_bd_vbase[index]; ps 318 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_HHBM_BUF_PTR_H(ps->pcie_reg_base)); ps 321 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_HHBM_BUF_PTR(ps->pcie_reg_base)); ps 323 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(index, PCIE_HDP_TX_HOST_Q_WR_PTR(ps->pcie_reg_base)); ps 327 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int pearl_alloc_rx_buffers(struct qtnf_pcie_pearl_state *ps) ps 332 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c memset(ps->rx_bd_vbase, 0x0, ps 333 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->base.rx_bd_num * sizeof(struct qtnf_pearl_rx_bd)); ps 335 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c for (i = 0; i < ps->base.rx_bd_num; i++) { ps 336 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = pearl_skb2rbd_attach(ps, i); ps 345 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static void qtnf_pearl_free_xfer_buffers(struct qtnf_pcie_pearl_state *ps) ps 347 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 357 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c rxbd = &ps->rx_bd_vbase[i]; ps 371 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c txbd = &ps->tx_bd_vbase[i]; ps 383 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int pearl_hhbm_init(struct qtnf_pcie_pearl_state *ps) ps 387 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c val = readl(PCIE_HHBM_CONFIG(ps->pcie_reg_base)); ps 389 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(val, PCIE_HHBM_CONFIG(ps->pcie_reg_base)); ps 395 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(val, PCIE_HHBM_CONFIG(ps->pcie_reg_base)); ps 396 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->base.rx_bd_num, PCIE_HHBM_Q_LIMIT_REG(ps->pcie_reg_base)); ps 401 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int qtnf_pcie_pearl_init_xfer(struct qtnf_pcie_pearl_state *ps, ps 404 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 436 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = pearl_hhbm_init(ps); ps 448 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = pearl_alloc_bd_table(ps); ps 454 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = pearl_alloc_rx_buffers(ps); ps 463 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static void qtnf_pearl_data_tx_reclaim(struct qtnf_pcie_pearl_state *ps) ps 465 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 476 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c tx_done_index = readl(PCIE_HDP_RX0DMA_CNT(ps->pcie_reg_base)) ps 484 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c txbd = &ps->tx_bd_vbase[i]; ps 515 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int qtnf_tx_queue_ready(struct qtnf_pcie_pearl_state *ps) ps 517 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 521 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_data_tx_reclaim(ps); ps 536 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 537 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 547 
drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (!qtnf_tx_queue_ready(ps)) { ps 569 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c txbd = &ps->tx_bd_vbase[i]; ps 580 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c txbd_paddr = ps->tx_bd_pbase + i * sizeof(struct qtnf_pearl_tx_bd); ps 584 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_HOST_WR_DESC0_H(ps->pcie_reg_base)); ps 587 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c PCIE_HDP_HOST_WR_DESC0(ps->pcie_reg_base)); ps 605 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_data_tx_reclaim(ps); ps 613 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 614 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 618 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c status = readl(PCIE_HDP_INT_STATUS(ps->pcie_reg_base)); ps 623 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (!(status & ps->pcie_irq_mask)) ps 627 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_rx_count++; ps 630 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_tx_count++; ps 633 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_irq_uf_count++; ps 636 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_dis_rxdone_irq(ps); ps 641 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_dis_txdone_irq(ps); ps 647 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_non_posted_write(~0U, PCIE_HDP_INT_STATUS(ps->pcie_reg_base)); ps 650 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_deassert_intx(ps); ps 655 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c static int qtnf_rx_data_ready(struct qtnf_pcie_pearl_state *ps) ps 657 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c u16 index = ps->base.rx_bd_r_index; ps 661 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c rxbd = &ps->rx_bd_vbase[index]; ps 673 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 674 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 688 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (!qtnf_rx_data_ready(ps)) ps 692 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c rxbd = &ps->rx_bd_vbase[r_idx]; ps 754 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = pearl_skb2rbd_attach(ps, w_idx); ps 768 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_en_rxdone_irq(ps); ps 777 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); ps 779 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c tasklet_hi_schedule(&ps->base.reclaim_tq); ps 784 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); ps 786 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_enable_hdp_irqs(ps); ps 792 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); ps 795 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_disable_hdp_irqs(ps); ps 812 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 813 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c u32 reg = 
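
qtnf_pearl_data_tx_reclaim() above walks the software consumer index of the TX descriptor ring forward until it catches up with the index the hardware reports as completed, wrapping modulo the ring size. A minimal sketch of that loop, with the per-descriptor DMA unmap and skb free reduced to a counter:

#include <stdio.h>

struct tx_ring {
	unsigned int bd_num;		/* number of descriptors in the ring */
	unsigned int r_index;		/* software consumer (reclaim) index */
};

static unsigned int tx_reclaim(struct tx_ring *r, unsigned int hw_done_index)
{
	unsigned int freed = 0;

	while (r->r_index != hw_done_index) {
		/* real driver: dma_unmap_single() + dev_kfree_skb_any() here */
		freed++;
		r->r_index = (r->r_index + 1) % r->bd_num;
	}
	return freed;
}

int main(void)
{
	struct tx_ring r = { .bd_num = 8, .r_index = 6 };

	/* hardware says it has completed everything before index 2 */
	printf("reclaimed %u descriptors\n", tx_reclaim(&r, 2));	/* 4 */
	return 0;
}
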
readl(PCIE_HDP_INT_EN(ps->pcie_reg_base)); ps 816 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c seq_printf(s, "pcie_irq_count(%u)\n", ps->base.pcie_irq_count); ps 817 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c seq_printf(s, "pcie_irq_tx_count(%u)\n", ps->pcie_irq_tx_count); ps 821 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c seq_printf(s, "pcie_irq_rx_count(%u)\n", ps->pcie_irq_rx_count); ps 825 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c seq_printf(s, "pcie_irq_uf_count(%u)\n", ps->pcie_irq_uf_count); ps 836 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 837 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_bus_priv *priv = &ps->base; ps 846 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c readl(PCIE_HDP_RX0DMA_CNT(ps->pcie_reg_base)) ps 855 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c readl(PCIE_HDP_TX0DMA_CNT(ps->pcie_reg_base)) ps 913 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_ep_fw_load(struct qtnf_pcie_pearl_state *ps, const u8 *fw, u32 fw_size) ps 930 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c len = qtnf_ep_fw_send(ps->base.pdev, fw_size, blk, pblk, fw); ps 936 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_set_state(&ps->bda->bda_rc_state, ps 938 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (qtnf_poll_state(&ps->bda->bda_ep_state, ps 945 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_clear_state(&ps->bda->bda_ep_state, ps 948 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (qtnf_is_state(&ps->bda->bda_ep_state, ps 961 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_clear_state(&ps->bda->bda_ep_state, ps 968 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_data_tx_reclaim(ps); ps 982 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = (void *)get_bus_priv(bus); ps 985 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct pci_dev *pdev = ps->base.pdev; ps 989 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (ps->base.flashboot) { ps 999 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_set_state(&ps->bda->bda_rc_state, state); ps 1001 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (qtnf_poll_state(&ps->bda->bda_ep_state, QTN_EP_FW_LOADRDY, ps 1005 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (!ps->base.flashboot) ps 1011 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_clear_state(&ps->bda->bda_ep_state, QTN_EP_FW_LOADRDY); ps 1013 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (ps->base.flashboot) { ps 1019 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = qtnf_ep_fw_load(ps, fw->data, fw->size); ps 1027 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (qtnf_poll_state(&ps->bda->bda_ep_state, QTN_EP_FW_DONE, ps 1033 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c if (qtnf_poll_state(&ps->bda->bda_ep_state, ps 1054 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = (void *)data; ps 1056 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_data_tx_reclaim(ps); ps 1057 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_en_txdone_irq(ps); ps 1072 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 1073 
drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct pci_dev *pdev = ps->base.pdev; ps 1077 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c spin_lock_init(&ps->irq_lock); ps 1080 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->pcie_reg_base = ps->base.dmareg_bar; ps 1081 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->bda = ps->base.epmem_bar; ps 1082 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c writel(ps->base.msi_enabled, &ps->bda->bda_rc_msi_enabled); ps 1084 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ret = qtnf_pcie_pearl_init_xfer(ps, tx_bd_size); ps 1091 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_init_hdp_irqs(ps); ps 1094 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_disable_hdp_irqs(ps); ps 1101 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_free_xfer_buffers(ps); ps 1105 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c tasklet_init(&ps->base.reclaim_tq, qtnf_pearl_reclaim_tasklet_fn, ps 1106 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c (unsigned long)ps); ps 1111 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ipc_int.arg = ps; ps 1112 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pcie_init_shm_ipc(&ps->base, &ps->bda->bda_shm_reg1, ps 1113 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c &ps->bda->bda_shm_reg2, &ipc_int); ps 1120 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps = get_bus_priv(bus); ps 1122 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_reset_ep(ps); ps 1123 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c qtnf_pearl_free_xfer_buffers(ps); ps 1141 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c struct qtnf_pcie_pearl_state *ps; ps 1143 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c bus = devm_kzalloc(&pdev->dev, sizeof(*bus) + sizeof(*ps), GFP_KERNEL); ps 1147 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps = get_bus_priv(bus); ps 1148 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->base.probe_cb = qtnf_pcie_pearl_probe; ps 1149 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->base.remove_cb = qtnf_pcie_pearl_remove; ps 1150 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->base.dma_mask_get_cb = qtnf_pearl_dma_mask_get; ps 1152 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->base.resume_cb = qtnf_pcie_pearl_resume; ps 1153 drivers/net/wireless/quantenna/qtnfmac/pcie/pearl_pcie.c ps->base.suspend_cb = qtnf_pcie_pearl_suspend; ps 689 drivers/net/wireless/realtek/rtlwifi/ps.c bool ps = false; ps 691 drivers/net/wireless/realtek/rtlwifi/ps.c ps = (hw->conf.flags & IEEE80211_CONF_PS); ps 697 drivers/net/wireless/realtek/rtlwifi/ps.c if (rtlpriv->psc.state && !ps) { ps 702 drivers/net/wireless/realtek/rtlwifi/ps.c if (ps) ps 706 drivers/net/wireless/realtek/rtlwifi/ps.c rtlpriv->psc.state = ps; ps 1617 drivers/net/wireless/rsi/rsi_91x_mgmt.c struct rsi_request_ps *ps; ps 1620 drivers/net/wireless/rsi/rsi_91x_mgmt.c int frame_len = sizeof(*ps); ps 1627 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps = (struct rsi_request_ps *)skb->data; ps 1630 drivers/net/wireless/rsi/rsi_91x_mgmt.c rsi_set_len_qno(&ps->desc.desc_dword0.len_qno, ps 1632 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->desc.desc_dword0.frame_type = WAKEUP_SLEEP_REQUEST; ps 1634 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.enable = RSI_PS_ENABLE; ps 1635 
drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->desc.desc_dword3.token = cpu_to_le16(RSI_SLEEP_REQUEST); ps 1637 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.enable = RSI_PS_DISABLE; ps 1638 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->desc.desc_dword0.len_qno |= cpu_to_le16(RSI_PS_DISABLE_IND); ps 1639 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->desc.desc_dword3.token = cpu_to_le16(RSI_WAKEUP_REQUEST); ps 1642 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_uapsd_acs = common->uapsd_bitmap; ps 1644 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.sleep_type = ps_info->sleep_type; ps 1645 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.num_bcns_per_lis_int = ps 1647 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.sleep_duration = ps 1651 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.connected_sleep = RSI_CONNECTED_SLEEP; ps 1653 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_sleep.connected_sleep = RSI_DEEP_SLEEP; ps 1655 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_listen_interval = cpu_to_le32(ps_info->listen_interval); ps 1656 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_dtim_interval_duration = ps 1660 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_listen_interval = cpu_to_le32(RSI_PS_DISABLE); ps 1662 drivers/net/wireless/rsi/rsi_91x_mgmt.c ps->ps_num_dtim_intervals = cpu_to_le16(ps_info->num_dtims_per_sleep); ps 1649 drivers/net/wireless/st/cw1200/sta.c int link_id, bool ps) ps 1655 drivers/net/wireless/st/cw1200/sta.c ps ? "Stop" : "Start", ps 1659 drivers/net/wireless/st/cw1200/sta.c ps ? STA_NOTIFY_SLEEP : STA_NOTIFY_AWAKE, link_id); ps 1072 drivers/net/wireless/ti/wl1251/acx.h struct acx_ps_statistics ps; ps 178 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, pspoll_timeouts, 20, "%u"); ps 179 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, upsd_timeouts, 20, "%u"); ps 180 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, upsd_max_sptime, 20, "%u"); ps 181 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, upsd_max_apturn, 20, "%u"); ps 182 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, pspoll_max_apturn, 20, "%u"); ps 183 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, pspoll_utilization, 20, "%u"); ps 184 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_FILE(ps, upsd_utilization, 20, "%u"); ps 318 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, pspoll_timeouts); ps 319 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, upsd_timeouts); ps 320 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, upsd_max_sptime); ps 321 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, upsd_max_apturn); ps 322 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, pspoll_max_apturn); ps 323 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, pspoll_utilization); ps 324 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_DEL(ps, upsd_utilization); ps 417 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, pspoll_timeouts); ps 418 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_timeouts); ps 419 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_max_sptime); ps 420 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_max_apturn); ps 421 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, pspoll_max_apturn); ps 422 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, 
pspoll_utilization); ps 423 drivers/net/wireless/ti/wl1251/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_utilization); ps 253 drivers/net/wireless/ti/wl12xx/acx.h struct wl12xx_acx_ps_statistics ps; ps 96 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, pspoll_timeouts, "%u"); ps 97 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, upsd_timeouts, "%u"); ps 98 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, upsd_max_sptime, "%u"); ps 99 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, upsd_max_apturn, "%u"); ps 100 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, pspoll_max_apturn, "%u"); ps 101 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, pspoll_utilization, "%u"); ps 102 drivers/net/wireless/ti/wl12xx/debugfs.c WL12XX_DEBUGFS_FWSTATS_FILE(ps, upsd_utilization, "%u"); ps 196 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, pspoll_timeouts); ps 197 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_timeouts); ps 198 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_max_sptime); ps 199 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_max_apturn); ps 200 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, pspoll_max_apturn); ps 201 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, pspoll_utilization); ps 202 drivers/net/wireless/ti/wl12xx/debugfs.c DEBUGFS_FWSTATS_ADD(ps, upsd_utilization); ps 4524 drivers/net/wireless/ti/wlcore/main.c if ((bss_conf->ps) && ps 4544 drivers/net/wireless/ti/wlcore/main.c } else if (!bss_conf->ps && ps 109 drivers/ntb/hw/mscc/ntb_hw_switchtec.c u32 ps; ps 134 drivers/ntb/hw/mscc/ntb_hw_switchtec.c ps = ioread32(&ctl->partition_status) & 0xFFFF; ps 136 drivers/ntb/hw/mscc/ntb_hw_switchtec.c if (ps != status) ps 140 drivers/ntb/hw/mscc/ntb_hw_switchtec.c if (ps == wait_status) ps 143 drivers/ntb/hw/mscc/ntb_hw_switchtec.c if (ps == status) { ps 2923 drivers/nvme/host/pci.c static int nvme_get_power_state(struct nvme_ctrl *ctrl, u32 *ps) ps 2925 drivers/nvme/host/pci.c return nvme_get_features(ctrl, NVME_FEAT_POWER_MGMT, 0, NULL, 0, ps); ps 2928 drivers/nvme/host/pci.c static int nvme_set_power_state(struct nvme_ctrl *ctrl, u32 ps) ps 2930 drivers/nvme/host/pci.c return nvme_set_features(ctrl, NVME_FEAT_POWER_MGMT, ps, NULL, 0, NULL); ps 175 drivers/of/fdt.c const char *p = nodename, *ps = p, *pa = NULL; ps 182 drivers/of/fdt.c ps = p + 1; ps 186 drivers/of/fdt.c if (pa < ps) ps 188 drivers/of/fdt.c len = (pa - ps) + 1; ps 197 drivers/of/fdt.c memcpy(pp->value, ps, len - 1); ps 40 drivers/pci/ats.c int pci_enable_ats(struct pci_dev *dev, int ps) ps 51 drivers/pci/ats.c if (ps < PCI_ATS_MIN_STU) ps 61 drivers/pci/ats.c if (pdev->ats_stu != ps) ps 66 drivers/pci/ats.c dev->ats_stu = ps; ps 434 drivers/power/supply/axp288_fuel_gauge.c static int fuel_gauge_get_property(struct power_supply *ps, ps 438 drivers/power/supply/axp288_fuel_gauge.c struct axp288_fg_info *info = power_supply_get_drvdata(ps); ps 527 drivers/power/supply/axp288_fuel_gauge.c static int fuel_gauge_set_property(struct power_supply *ps, ps 531 drivers/power/supply/axp288_fuel_gauge.c struct axp288_fg_info *info = power_supply_get_drvdata(ps); ps 139 drivers/pwm/pwm-fsl-ftm.c unsigned int ps; ps 148 drivers/pwm/pwm-fsl-ftm.c for (ps = 0; ps < 8 ; ++ps, c >>= 1) { ps 151 drivers/pwm/pwm-fsl-ftm.c periodcfg->clk_ps = ps; ps 128 drivers/pwm/pwm-sti.c unsigned int 
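
The wl1251/wl12xx DEBUGFS_FWSTATS_FILE()/_ADD()/_DEL() entries above stamp out one debugfs file per firmware statistics counter via token pasting. A toy standalone illustration of the same macro technique, with printf standing in for debugfs and invented field names:

#include <stdio.h>

struct fw_stats {
	struct {
		unsigned int pspoll_timeouts;
		unsigned int upsd_timeouts;
	} ps;
};

/*
 * Toy version of the DEBUGFS_FWSTATS_FILE() idea: one macro generates an
 * accessor per (sub-struct, field) pair by token pasting.  The real code
 * registers a debugfs file; this just prints the value.
 */
#define FWSTATS_SHOW(sub, name)						\
	static void show_##sub##_##name(const struct fw_stats *s)	\
	{								\
		printf(#sub "_" #name " = %u\n", s->sub.name);		\
	}

FWSTATS_SHOW(ps, pspoll_timeouts)
FWSTATS_SHOW(ps, upsd_timeouts)

int main(void)
{
	struct fw_stats st = { .ps = { .pspoll_timeouts = 3, .upsd_timeouts = 1 } };

	show_ps_pspoll_timeouts(&st);
	show_ps_upsd_timeouts(&st);
	return 0;
}
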
ps; ps 145 drivers/pwm/pwm-sti.c ps = period / value - 1; ps 146 drivers/pwm/pwm-sti.c if (ps > cdata->max_prescale) ps 149 drivers/pwm/pwm-sti.c *prescale = ps; ps 52 drivers/s390/cio/css.h struct path_state ps; /* SNID path state */ ps 314 drivers/s390/cio/device_pgid.c if (pgid->inf.ps.state2 == SNID_STATE2_RESVD_ELSE) ps 347 drivers/s390/cio/device_pgid.c if (pgid->inf.ps.state1 != SNID_STATE1_GROUPED) ps 350 drivers/s390/cio/device_pgid.c if (pgid->inf.ps.state1 != SNID_STATE1_UNGROUPED) ps 354 drivers/s390/cio/device_pgid.c if (pgid->inf.ps.state3 != SNID_STATE3_MULTI_PATH) ps 357 drivers/s390/cio/device_pgid.c if (pgid->inf.ps.state3 != SNID_STATE3_SINGLE_PATH) ps 920 drivers/scsi/aic94xx/aic94xx_sds.c struct asd_ctrla_phy_settings *ps) ps 923 drivers/scsi/aic94xx/aic94xx_sds.c for (i = 0; i < ps->num_phys; i++) { ps 924 drivers/scsi/aic94xx/aic94xx_sds.c struct asd_ctrla_phy_entry *pe = &ps->phy_ent[i]; ps 969 drivers/scsi/aic94xx/aic94xx_sds.c struct asd_ctrla_phy_settings *ps; ps 988 drivers/scsi/aic94xx/aic94xx_sds.c ps = &dflt_ps; ps 1009 drivers/scsi/aic94xx/aic94xx_sds.c ps = asd_find_ll_by_id(el, 'h', 0xFF); ps 1010 drivers/scsi/aic94xx/aic94xx_sds.c if (!ps) { ps 1015 drivers/scsi/aic94xx/aic94xx_sds.c err = asd_process_ctrla_phy_settings(asd_ha, ps); ps 2276 drivers/scsi/scsi_debug.c int pf, sp, ps, md_len, bd_len, off, spf, pg_len; ps 2305 drivers/scsi/scsi_debug.c ps = !!(arr[off] & 0x80); ps 2306 drivers/scsi/scsi_debug.c if (ps) { ps 691 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 795 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 844 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 940 drivers/scsi/st.c STps = &(STp->ps[i]); ps 1076 drivers/scsi/st.c STps = &(STp->ps[i]); ps 1098 drivers/scsi/st.c STp->ps[0].drv_file = STp->ps[0].drv_block = (-1); ps 1305 drivers/scsi/st.c STps = &(STp->ps[i]); ps 1347 drivers/scsi/st.c struct st_partstat *STps = &(STp->ps[STp->partition]); ps 1420 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 1651 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 1914 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 2123 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 2665 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 2717 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 3150 drivers/scsi/st.c STp->ps[0].drv_block = STp->ps[0].drv_file = 0; ps 3177 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 3244 drivers/scsi/st.c STps = &(STp->ps[partition]); ps 3286 drivers/scsi/st.c STps = &(STp->ps[STp->new_partition]); ps 3524 drivers/scsi/st.c STps = &(STp->ps[STp->partition]); ps 3684 drivers/scsi/st.c STp->ps[i].rw = ST_IDLE; ps 3685 drivers/scsi/st.c STp->ps[i].at_sm = 0; ps 3686 drivers/scsi/st.c STp->ps[i].last_block_valid = 0; ps 3698 drivers/scsi/st.c STp->ps[0].rw = ST_IDLE; ps 4359 drivers/scsi/st.c STps = &(tpnt->ps[i]); ps 160 drivers/scsi/st.h struct st_partstat ps[ST_NBR_PARTITIONS]; ps 451 drivers/staging/comedi/drivers/ni_tio.c u64 ps = 0; ps 485 drivers/staging/comedi/drivers/ni_tio.c ret = ni_tio_clock_period_ps(counter, clk_src, &ps); ps 492 drivers/staging/comedi/drivers/ni_tio.c if (force_alt_sync || (ps && ps < min_normal_sync_period_ps)) ps 1120 drivers/staging/rtl8192e/rtl8192e/rtl_core.c switch (priv->rtllib->ps) { ps 215 drivers/staging/rtl8192e/rtl8192e/rtl_ps.c priv->rtllib->ps = rtPsMode; ps 242 drivers/staging/rtl8192e/rtl8192e/rtl_ps.c pPSC->bLeisurePs, priv->rtllib->ps, pPSC->LpsIdleCount, ps 254 drivers/staging/rtl8192e/rtl8192e/rtl_ps.c if 
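
The pwm-fsl-ftm and pwm-sti entries above both derive a prescaler from the requested period: fsl-ftm walks the eight power-of-two prescalers until the period fits its 16-bit counter, while sti computes ps = period / value - 1 and rejects results above max_prescale. A standalone sketch of the power-of-two search; the clock rate, counter width and sample period are illustrative:

#include <stdio.h>

static int pick_prescaler(unsigned long long clk_hz, unsigned long long period_ns,
			  unsigned int *ps, unsigned int *period_cycles)
{
	unsigned long long c = clk_hz;
	unsigned int i;

	for (i = 0; i < 8; i++, c >>= 1) {
		unsigned long long cycles = c * period_ns / 1000000000ULL;

		if (cycles <= 0xFFFF) {		/* fits the 16-bit counter */
			*ps = i;
			*period_cycles = (unsigned int)cycles;
			return 0;
		}
	}
	return -1;	/* period too long even with the largest prescaler */
}

int main(void)
{
	unsigned int ps, cycles;

	/* 66 MHz bus clock, 2 ms period */
	if (!pick_prescaler(66000000ULL, 2000000ULL, &ps, &cycles))
		printf("prescaler 2^%u, %u counter ticks\n", ps, cycles);
	return 0;
}
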
(priv->rtllib->ps == RTLLIB_PS_DISABLED) { ps 281 drivers/staging/rtl8192e/rtl8192e/rtl_ps.c pPSC->bLeisurePs, priv->rtllib->ps); ps 284 drivers/staging/rtl8192e/rtl8192e/rtl_ps.c if (priv->rtllib->ps != RTLLIB_PS_DISABLED) { ps 181 drivers/staging/rtl8192e/rtl8192e/rtl_wx.c ieee->ps = *extra; ps 1584 drivers/staging/rtl8192e/rtllib.h short ps; ps 2744 drivers/staging/rtl8192e/rtllib_rx.c if (ieee->sta_sleep || (ieee->ps != RTLLIB_PS_DISABLED && ps 1980 drivers/staging/rtl8192e/rtllib_softmac.c if (dtim & (RTLLIB_DTIM_UCAST & ieee->ps)) ps 2055 drivers/staging/rtl8192e/rtllib_softmac.c if ((ieee->ps == RTLLIB_PS_DISABLED || ps 2060 drivers/staging/rtl8192e/rtllib_softmac.c __func__, ieee->ps, ieee->iw_mode, ieee->state); ps 2985 drivers/staging/rtl8192e/rtllib_softmac.c ieee->ps = RTLLIB_PS_DISABLED; ps 3223 drivers/staging/rtl8192e/rtllib_softmac.c if (rtllib->ps != RTLLIB_PS_DISABLED) ps 579 drivers/staging/rtl8192e/rtllib_softmac_wx.c ieee->ps = RTLLIB_PS_DISABLED; ps 593 drivers/staging/rtl8192e/rtllib_softmac_wx.c ieee->ps = RTLLIB_PS_UNICAST; ps 596 drivers/staging/rtl8192e/rtllib_softmac_wx.c ieee->ps = RTLLIB_PS_MBCAST; ps 599 drivers/staging/rtl8192e/rtllib_softmac_wx.c ieee->ps = RTLLIB_PS_UNICAST | RTLLIB_PS_MBCAST; ps 624 drivers/staging/rtl8192e/rtllib_softmac_wx.c if (ieee->ps == RTLLIB_PS_DISABLED) { ps 639 drivers/staging/rtl8192e/rtllib_softmac_wx.c if ((ieee->ps & (RTLLIB_PS_MBCAST | RTLLIB_PS_UNICAST)) == ps 642 drivers/staging/rtl8192e/rtllib_softmac_wx.c else if (ieee->ps & RTLLIB_PS_MBCAST) ps 1789 drivers/staging/rtl8192u/ieee80211/ieee80211.h short ps; ps 1656 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c if (dtim & ((IEEE80211_DTIM_UCAST | IEEE80211_DTIM_MBCAST) & ieee->ps)) ps 1695 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c if ((ieee->ps == IEEE80211_PS_DISABLED || ps 1892 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c if (ieee->sta_sleep || (ieee->ps != IEEE80211_PS_DISABLED && ps 2560 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c ieee->ps = IEEE80211_PS_DISABLED; ps 519 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c ieee->ps = IEEE80211_PS_DISABLED; ps 536 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c ieee->ps = IEEE80211_PS_UNICAST; ps 539 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c ieee->ps = IEEE80211_PS_MBCAST; ps 542 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c ieee->ps = IEEE80211_PS_UNICAST | IEEE80211_PS_MBCAST; ps 568 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c if (ieee->ps == IEEE80211_PS_DISABLED) { ps 586 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c if ((ieee->ps & (IEEE80211_PS_MBCAST | IEEE80211_PS_UNICAST)) == (IEEE80211_PS_MBCAST | IEEE80211_PS_UNICAST)) ps 588 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac_wx.c else if (ieee->ps & IEEE80211_PS_MBCAST) ps 205 drivers/tty/serial/8250/8250_mid.c unsigned short ps = 16; ps 206 drivers/tty/serial/8250/8250_mid.c unsigned long fuart = baud * ps; ps 216 drivers/tty/serial/8250/8250_mid.c ps = mid->board->freq / baud; /* baud rate too high */ ps 218 drivers/tty/serial/8250/8250_mid.c ps = 1; /* PLL case */ ps 219 drivers/tty/serial/8250/8250_mid.c fuart = baud * ps; ps 226 drivers/tty/serial/8250/8250_mid.c p->uartclk = fuart * 16 / ps; /* core uses ps = 16 always */ ps 228 drivers/tty/serial/8250/8250_mid.c writel(ps, p->membase + INTEL_MID_UART_PS); /* set PS */ ps 194 drivers/tty/vt/selection.c int i, ps, pe, multiplier; ps 204 drivers/tty/vt/selection.c ps = v->ys * 
vc->vc_size_row + (v->xs << 1); ps 219 drivers/tty/vt/selection.c if (ps > pe) /* make sel_start <= sel_end */ ps 220 drivers/tty/vt/selection.c swap(ps, pe); ps 235 drivers/tty/vt/selection.c new_sel_start = ps; ps 239 drivers/tty/vt/selection.c spc = isspace(sel_pos(ps)); ps 240 drivers/tty/vt/selection.c for (new_sel_start = ps; ; ps -= 2) ps 242 drivers/tty/vt/selection.c if ((spc && !isspace(sel_pos(ps))) || ps 243 drivers/tty/vt/selection.c (!spc && !inword(sel_pos(ps)))) ps 245 drivers/tty/vt/selection.c new_sel_start = ps; ps 246 drivers/tty/vt/selection.c if (!(ps % vc->vc_size_row)) ps 261 drivers/tty/vt/selection.c new_sel_start = ps - ps % vc->vc_size_row; ps 90 drivers/usb/core/devio.c struct usb_dev_state *ps; ps 95 drivers/usb/core/devio.c struct usb_dev_state *ps; ps 167 drivers/usb/core/devio.c static int connected(struct usb_dev_state *ps) ps 169 drivers/usb/core/devio.c return (!list_empty(&ps->list) && ps 170 drivers/usb/core/devio.c ps->dev->state != USB_STATE_NOTATTACHED); ps 175 drivers/usb/core/devio.c struct usb_dev_state *ps = usbm->ps; ps 178 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 182 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 184 drivers/usb/core/devio.c usb_free_coherent(ps->dev, usbm->size, usbm->mem, ps 190 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 199 drivers/usb/core/devio.c spin_lock_irqsave(&usbm->ps->lock, flags); ps 201 drivers/usb/core/devio.c spin_unlock_irqrestore(&usbm->ps->lock, flags); ps 219 drivers/usb/core/devio.c struct usb_dev_state *ps = file->private_data; ps 220 drivers/usb/core/devio.c struct usb_hcd *hcd = bus_to_hcd(ps->dev->bus); ps 237 drivers/usb/core/devio.c mem = usb_alloc_coherent(ps->dev, size, GFP_USER | __GFP_NOWARN, ps 249 drivers/usb/core/devio.c usbm->ps = ps; ps 274 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 275 drivers/usb/core/devio.c list_add_tail(&usbm->memlist, &ps->memory_list); ps 276 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 291 drivers/usb/core/devio.c struct usb_dev_state *ps = file->private_data; ps 292 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 300 drivers/usb/core/devio.c if (!connected(ps)) { ps 419 drivers/usb/core/devio.c struct usb_dev_state *ps = as->ps; ps 422 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 423 drivers/usb/core/devio.c list_add_tail(&as->asynclist, &ps->async_pending); ps 424 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 429 drivers/usb/core/devio.c struct usb_dev_state *ps = as->ps; ps 432 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 434 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 437 drivers/usb/core/devio.c static struct async *async_getcompleted(struct usb_dev_state *ps) ps 442 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 443 drivers/usb/core/devio.c if (!list_empty(&ps->async_completed)) { ps 444 drivers/usb/core/devio.c as = list_entry(ps->async_completed.next, struct async, ps 448 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 452 drivers/usb/core/devio.c static struct async *async_getpending(struct usb_dev_state *ps, ps 457 drivers/usb/core/devio.c list_for_each_entry(as, &ps->async_pending, asynclist) ps 562 drivers/usb/core/devio.c static void cancel_bulk_urbs(struct usb_dev_state *ps, unsigned bulk_addr) ps 563 drivers/usb/core/devio.c __releases(ps->lock) ps 564 drivers/usb/core/devio.c 
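
The drivers/tty/vt/selection.c entries above normalize the two selection endpoints (swap so start <= end) and then grow them outward to word boundaries. A rough userspace sketch of the same idea on a plain C string, treating a word as a run of alphanumerics rather than using the console's inword() table:

#include <ctype.h>
#include <stdio.h>
#include <string.h>

static void select_word(const char *buf, size_t len, size_t *ps, size_t *pe)
{
	if (*ps > *pe) {			/* make sel_start <= sel_end */
		size_t tmp = *ps;
		*ps = *pe;
		*pe = tmp;
	}
	while (*ps > 0 && isalnum((unsigned char)buf[*ps - 1]))
		(*ps)--;			/* grow left to the word start */
	while (*pe + 1 < len && isalnum((unsigned char)buf[*pe + 1]))
		(*pe)++;			/* grow right to the word end */
}

int main(void)
{
	const char *line = "make modules_install";
	size_t s = 9, e = 7;			/* clicks landed inside "modules" */

	select_word(line, strlen(line), &s, &e);
	printf("selected: \"%.*s\"\n", (int)(e - s + 1), line + s);	/* modules */
	return 0;
}
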
__acquires(ps->lock) ps 574 drivers/usb/core/devio.c list_for_each_entry(as, &ps->async_pending, asynclist) { ps 582 drivers/usb/core/devio.c ps->disabled_bulk_eps |= (1 << bulk_addr); ps 586 drivers/usb/core/devio.c list_for_each_entry(as, &ps->async_pending, asynclist) { ps 591 drivers/usb/core/devio.c spin_unlock(&ps->lock); /* Allow completions */ ps 594 drivers/usb/core/devio.c spin_lock(&ps->lock); ps 603 drivers/usb/core/devio.c struct usb_dev_state *ps = as->ps; ps 610 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 611 drivers/usb/core/devio.c list_move_tail(&as->asynclist, &ps->async_completed); ps 628 drivers/usb/core/devio.c cancel_bulk_urbs(ps, as->bulk_addr); ps 630 drivers/usb/core/devio.c wake_up(&ps->wait); ps 631 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 640 drivers/usb/core/devio.c static void destroy_async(struct usb_dev_state *ps, struct list_head *list) ps 646 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 654 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 657 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 659 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 662 drivers/usb/core/devio.c static void destroy_async_on_interface(struct usb_dev_state *ps, ps 669 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 670 drivers/usb/core/devio.c list_for_each_safe(p, q, &ps->async_pending) ps 673 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 674 drivers/usb/core/devio.c destroy_async(ps, &hitlist); ps 677 drivers/usb/core/devio.c static void destroy_all_async(struct usb_dev_state *ps) ps 679 drivers/usb/core/devio.c destroy_async(ps, &ps->async_pending); ps 696 drivers/usb/core/devio.c struct usb_dev_state *ps = usb_get_intfdata(intf); ps 699 drivers/usb/core/devio.c if (!ps) ps 706 drivers/usb/core/devio.c if (likely(ifnum < 8*sizeof(ps->ifclaimed))) ps 707 drivers/usb/core/devio.c clear_bit(ifnum, &ps->ifclaimed); ps 715 drivers/usb/core/devio.c destroy_async_on_interface(ps, ifnum); ps 737 drivers/usb/core/devio.c struct usb_dev_state *ps; ps 741 drivers/usb/core/devio.c list_for_each_entry(ps, &udev->filelist, list) { ps 742 drivers/usb/core/devio.c WRITE_ONCE(ps->not_yet_resumed, 0); ps 743 drivers/usb/core/devio.c wake_up_all(&ps->wait_for_resume); ps 757 drivers/usb/core/devio.c static int claimintf(struct usb_dev_state *ps, unsigned int ifnum) ps 759 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 763 drivers/usb/core/devio.c if (ifnum >= 8*sizeof(ps->ifclaimed)) ps 766 drivers/usb/core/devio.c if (test_bit(ifnum, &ps->ifclaimed)) ps 769 drivers/usb/core/devio.c if (ps->privileges_dropped && ps 770 drivers/usb/core/devio.c !test_bit(ifnum, &ps->interface_allowed_mask)) ps 782 drivers/usb/core/devio.c err = usb_driver_claim_interface(&usbfs_driver, intf, ps); ps 786 drivers/usb/core/devio.c set_bit(ifnum, &ps->ifclaimed); ps 790 drivers/usb/core/devio.c static int releaseintf(struct usb_dev_state *ps, unsigned int ifnum) ps 797 drivers/usb/core/devio.c if (ifnum >= 8*sizeof(ps->ifclaimed)) ps 799 drivers/usb/core/devio.c dev = ps->dev; ps 803 drivers/usb/core/devio.c else if (test_and_clear_bit(ifnum, &ps->ifclaimed)) { ps 816 drivers/usb/core/devio.c static int checkintf(struct usb_dev_state *ps, unsigned int ifnum) ps 818 drivers/usb/core/devio.c if (ps->dev->state != USB_STATE_CONFIGURED) ps 820 drivers/usb/core/devio.c if (ifnum >= 8*sizeof(ps->ifclaimed)) ps 822 drivers/usb/core/devio.c if 
(test_bit(ifnum, &ps->ifclaimed)) ps 825 drivers/usb/core/devio.c dev_warn(&ps->dev->dev, "usbfs: process %d (%s) did not claim " ps 828 drivers/usb/core/devio.c return claimintf(ps, ifnum); ps 856 drivers/usb/core/devio.c static int check_ctrlrecip(struct usb_dev_state *ps, unsigned int requesttype, ps 862 drivers/usb/core/devio.c if (ps->dev->state != USB_STATE_UNAUTHENTICATED ps 863 drivers/usb/core/devio.c && ps->dev->state != USB_STATE_ADDRESS ps 864 drivers/usb/core/devio.c && ps->dev->state != USB_STATE_CONFIGURED) ps 875 drivers/usb/core/devio.c alt_setting = usb_find_alt_setting(ps->dev->actconfig, ps 887 drivers/usb/core/devio.c ret = findintfep(ps->dev, index); ps 897 drivers/usb/core/devio.c ret = findintfep(ps->dev, index ^ 0x80); ps 899 drivers/usb/core/devio.c dev_info(&ps->dev->dev, ps 905 drivers/usb/core/devio.c ret = checkintf(ps, ret); ps 909 drivers/usb/core/devio.c ret = checkintf(ps, index); ps 924 drivers/usb/core/devio.c static int parse_usbdevfs_streams(struct usb_dev_state *ps, ps 957 drivers/usb/core/devio.c eps[i] = ep_to_host_endpoint(ps->dev, ep); ps 964 drivers/usb/core/devio.c ifnum = findintfep(ps->dev, ep); ps 971 drivers/usb/core/devio.c ret = checkintf(ps, ifnum); ps 974 drivers/usb/core/devio.c intf = usb_ifnum_to_if(ps->dev, ifnum); ps 1013 drivers/usb/core/devio.c struct usb_dev_state *ps; ps 1017 drivers/usb/core/devio.c ps = kzalloc(sizeof(struct usb_dev_state), GFP_KERNEL); ps 1018 drivers/usb/core/devio.c if (!ps) ps 1037 drivers/usb/core/devio.c ps->dev = dev; ps 1038 drivers/usb/core/devio.c ps->file = file; ps 1039 drivers/usb/core/devio.c ps->interface_allowed_mask = 0xFFFFFFFF; /* 32 bits */ ps 1040 drivers/usb/core/devio.c spin_lock_init(&ps->lock); ps 1041 drivers/usb/core/devio.c INIT_LIST_HEAD(&ps->list); ps 1042 drivers/usb/core/devio.c INIT_LIST_HEAD(&ps->async_pending); ps 1043 drivers/usb/core/devio.c INIT_LIST_HEAD(&ps->async_completed); ps 1044 drivers/usb/core/devio.c INIT_LIST_HEAD(&ps->memory_list); ps 1045 drivers/usb/core/devio.c init_waitqueue_head(&ps->wait); ps 1046 drivers/usb/core/devio.c init_waitqueue_head(&ps->wait_for_resume); ps 1047 drivers/usb/core/devio.c ps->disc_pid = get_pid(task_pid(current)); ps 1048 drivers/usb/core/devio.c ps->cred = get_current_cred(); ps 1052 drivers/usb/core/devio.c list_add_tail(&ps->list, &dev->filelist); ps 1053 drivers/usb/core/devio.c file->private_data = ps; ps 1063 drivers/usb/core/devio.c kfree(ps); ps 1069 drivers/usb/core/devio.c struct usb_dev_state *ps = file->private_data; ps 1070 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 1075 drivers/usb/core/devio.c usb_hub_release_all_ports(dev, ps); ps 1079 drivers/usb/core/devio.c list_del_init(&ps->list); ps 1082 drivers/usb/core/devio.c for (ifnum = 0; ps->ifclaimed && ifnum < 8*sizeof(ps->ifclaimed); ps 1084 drivers/usb/core/devio.c if (test_bit(ifnum, &ps->ifclaimed)) ps 1085 drivers/usb/core/devio.c releaseintf(ps, ifnum); ps 1087 drivers/usb/core/devio.c destroy_all_async(ps); ps 1088 drivers/usb/core/devio.c if (!ps->suspend_allowed) ps 1092 drivers/usb/core/devio.c put_pid(ps->disc_pid); ps 1093 drivers/usb/core/devio.c put_cred(ps->cred); ps 1095 drivers/usb/core/devio.c as = async_getcompleted(ps); ps 1098 drivers/usb/core/devio.c as = async_getcompleted(ps); ps 1101 drivers/usb/core/devio.c kfree(ps); ps 1105 drivers/usb/core/devio.c static int proc_control(struct usb_dev_state *ps, void __user *arg) ps 1107 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 1116 drivers/usb/core/devio.c ret = 
check_ctrlrecip(ps, ctrl.bRequestType, ctrl.bRequest, ps 1192 drivers/usb/core/devio.c static int proc_bulk(struct usb_dev_state *ps, void __user *arg) ps 1194 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 1203 drivers/usb/core/devio.c ret = findintfep(ps->dev, bulk.ep); ps 1206 drivers/usb/core/devio.c ret = checkintf(ps, ret); ps 1280 drivers/usb/core/devio.c static int proc_resetep(struct usb_dev_state *ps, void __user *arg) ps 1287 drivers/usb/core/devio.c ret = findintfep(ps->dev, ep); ps 1290 drivers/usb/core/devio.c ret = checkintf(ps, ret); ps 1293 drivers/usb/core/devio.c check_reset_of_active_ep(ps->dev, ep, "RESETEP"); ps 1294 drivers/usb/core/devio.c usb_reset_endpoint(ps->dev, ep); ps 1298 drivers/usb/core/devio.c static int proc_clearhalt(struct usb_dev_state *ps, void __user *arg) ps 1306 drivers/usb/core/devio.c ret = findintfep(ps->dev, ep); ps 1309 drivers/usb/core/devio.c ret = checkintf(ps, ret); ps 1312 drivers/usb/core/devio.c check_reset_of_active_ep(ps->dev, ep, "CLEAR_HALT"); ps 1314 drivers/usb/core/devio.c pipe = usb_rcvbulkpipe(ps->dev, ep & 0x7f); ps 1316 drivers/usb/core/devio.c pipe = usb_sndbulkpipe(ps->dev, ep & 0x7f); ps 1318 drivers/usb/core/devio.c return usb_clear_halt(ps->dev, pipe); ps 1321 drivers/usb/core/devio.c static int proc_getdriver(struct usb_dev_state *ps, void __user *arg) ps 1329 drivers/usb/core/devio.c intf = usb_ifnum_to_if(ps->dev, gd.interface); ps 1340 drivers/usb/core/devio.c static int proc_connectinfo(struct usb_dev_state *ps, void __user *arg) ps 1345 drivers/usb/core/devio.c ci.devnum = ps->dev->devnum; ps 1346 drivers/usb/core/devio.c ci.slow = ps->dev->speed == USB_SPEED_LOW; ps 1353 drivers/usb/core/devio.c static int proc_conninfo_ex(struct usb_dev_state *ps, ps 1357 drivers/usb/core/devio.c struct usb_device *udev = ps->dev; ps 1386 drivers/usb/core/devio.c static int proc_resetdevice(struct usb_dev_state *ps) ps 1388 drivers/usb/core/devio.c struct usb_host_config *actconfig = ps->dev->actconfig; ps 1396 drivers/usb/core/devio.c if (ps->privileges_dropped && actconfig) { ps 1401 drivers/usb/core/devio.c !test_bit(number, &ps->ifclaimed)) { ps 1402 drivers/usb/core/devio.c dev_warn(&ps->dev->dev, ps 1410 drivers/usb/core/devio.c return usb_reset_device(ps->dev); ps 1413 drivers/usb/core/devio.c static int proc_setintf(struct usb_dev_state *ps, void __user *arg) ps 1420 drivers/usb/core/devio.c ret = checkintf(ps, setintf.interface); ps 1424 drivers/usb/core/devio.c destroy_async_on_interface(ps, setintf.interface); ps 1426 drivers/usb/core/devio.c return usb_set_interface(ps->dev, setintf.interface, ps 1430 drivers/usb/core/devio.c static int proc_setconfig(struct usb_dev_state *ps, void __user *arg) ps 1439 drivers/usb/core/devio.c actconfig = ps->dev->actconfig; ps 1450 drivers/usb/core/devio.c dev_warn(&ps->dev->dev, ps 1470 drivers/usb/core/devio.c status = usb_reset_configuration(ps->dev); ps 1472 drivers/usb/core/devio.c status = usb_set_configuration(ps->dev, u); ps 1479 drivers/usb/core/devio.c find_memory_area(struct usb_dev_state *ps, const struct usbdevfs_urb *uurb) ps 1485 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 1486 drivers/usb/core/devio.c list_for_each_entry(iter, &ps->memory_list, memlist) { ps 1499 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 1503 drivers/usb/core/devio.c static int proc_do_submiturb(struct usb_dev_state *ps, struct usbdevfs_urb *uurb, ps 1537 drivers/usb/core/devio.c ifnum = findintfep(ps->dev, uurb->endpoint); ps 1540 
drivers/usb/core/devio.c ret = checkintf(ps, ifnum); ps 1544 drivers/usb/core/devio.c ep = ep_to_host_endpoint(ps->dev, uurb->endpoint); ps 1568 drivers/usb/core/devio.c ret = check_ctrlrecip(ps, dr->bRequestType, dr->bRequest, ps 1583 drivers/usb/core/devio.c snoop(&ps->dev->dev, "control urb: bRequestType=%02x " ps 1608 drivers/usb/core/devio.c if (num_sgs == 1 || num_sgs > ps->dev->bus->sg_tablesize) ps 1670 drivers/usb/core/devio.c as->usbm = find_memory_area(ps, uurb); ps 1753 drivers/usb/core/devio.c as->urb->dev = ps->dev; ps 1755 drivers/usb/core/devio.c __create_pipe(ps->dev, uurb->endpoint & 0xf) | ps 1774 drivers/usb/core/devio.c dev_warn(&ps->dev->dev, "Requested nonsensical USBDEVFS_URB_SHORT_NOT_OK.\n"); ps 1776 drivers/usb/core/devio.c dev_warn(&ps->dev->dev, "Requested nonsensical USBDEVFS_URB_ZERO_PACKET.\n"); ps 1787 drivers/usb/core/devio.c ps->dev->speed == USB_SPEED_HIGH || ps 1788 drivers/usb/core/devio.c ps->dev->speed >= USB_SPEED_SUPER) ps 1804 drivers/usb/core/devio.c as->ps = ps; ps 1819 drivers/usb/core/devio.c snoop_urb(ps->dev, as->userurb, as->urb->pipe, ps 1828 drivers/usb/core/devio.c spin_lock_irq(&ps->lock); ps 1844 drivers/usb/core/devio.c ps->disabled_bulk_eps &= ~(1 << as->bulk_addr); ps 1849 drivers/usb/core/devio.c if (ps->disabled_bulk_eps & (1 << as->bulk_addr)) ps 1853 drivers/usb/core/devio.c spin_unlock_irq(&ps->lock); ps 1859 drivers/usb/core/devio.c dev_printk(KERN_DEBUG, &ps->dev->dev, ps 1861 drivers/usb/core/devio.c snoop_urb(ps->dev, as->userurb, as->urb->pipe, ps 1876 drivers/usb/core/devio.c static int proc_submiturb(struct usb_dev_state *ps, void __user *arg) ps 1887 drivers/usb/core/devio.c return proc_do_submiturb(ps, &uurb, ps 1892 drivers/usb/core/devio.c static int proc_unlinkurb(struct usb_dev_state *ps, void __user *arg) ps 1898 drivers/usb/core/devio.c spin_lock_irqsave(&ps->lock, flags); ps 1899 drivers/usb/core/devio.c as = async_getpending(ps, arg); ps 1901 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 1907 drivers/usb/core/devio.c spin_unlock_irqrestore(&ps->lock, flags); ps 1965 drivers/usb/core/devio.c static struct async *reap_as(struct usb_dev_state *ps) ps 1969 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 1971 drivers/usb/core/devio.c add_wait_queue(&ps->wait, &wait); ps 1974 drivers/usb/core/devio.c as = async_getcompleted(ps); ps 1975 drivers/usb/core/devio.c if (as || !connected(ps)) ps 1983 drivers/usb/core/devio.c remove_wait_queue(&ps->wait, &wait); ps 1988 drivers/usb/core/devio.c static int proc_reapurb(struct usb_dev_state *ps, void __user *arg) ps 1990 drivers/usb/core/devio.c struct async *as = reap_as(ps); ps 1995 drivers/usb/core/devio.c snoop(&ps->dev->dev, "reap %pK\n", as->userurb); ps 2005 drivers/usb/core/devio.c static int proc_reapurbnonblock(struct usb_dev_state *ps, void __user *arg) ps 2010 drivers/usb/core/devio.c as = async_getcompleted(ps); ps 2012 drivers/usb/core/devio.c snoop(&ps->dev->dev, "reap %pK\n", as->userurb); ps 2016 drivers/usb/core/devio.c retval = (connected(ps) ? 
-EAGAIN : -ENODEV); ps 2022 drivers/usb/core/devio.c static int proc_control_compat(struct usb_dev_state *ps, ps 2032 drivers/usb/core/devio.c return proc_control(ps, p); ps 2035 drivers/usb/core/devio.c static int proc_bulk_compat(struct usb_dev_state *ps, ps 2050 drivers/usb/core/devio.c return proc_bulk(ps, p); ps 2052 drivers/usb/core/devio.c static int proc_disconnectsignal_compat(struct usb_dev_state *ps, void __user *arg) ps 2058 drivers/usb/core/devio.c ps->discsignr = ds.signr; ps 2059 drivers/usb/core/devio.c ps->disccontext.sival_int = ds.context; ps 2084 drivers/usb/core/devio.c static int proc_submiturb_compat(struct usb_dev_state *ps, void __user *arg) ps 2095 drivers/usb/core/devio.c return proc_do_submiturb(ps, &uurb, ps 2135 drivers/usb/core/devio.c static int proc_reapurb_compat(struct usb_dev_state *ps, void __user *arg) ps 2137 drivers/usb/core/devio.c struct async *as = reap_as(ps); ps 2142 drivers/usb/core/devio.c snoop(&ps->dev->dev, "reap %pK\n", as->userurb); ps 2152 drivers/usb/core/devio.c static int proc_reapurbnonblock_compat(struct usb_dev_state *ps, void __user *arg) ps 2157 drivers/usb/core/devio.c as = async_getcompleted(ps); ps 2159 drivers/usb/core/devio.c snoop(&ps->dev->dev, "reap %pK\n", as->userurb); ps 2163 drivers/usb/core/devio.c retval = (connected(ps) ? -EAGAIN : -ENODEV); ps 2171 drivers/usb/core/devio.c static int proc_disconnectsignal(struct usb_dev_state *ps, void __user *arg) ps 2177 drivers/usb/core/devio.c ps->discsignr = ds.signr; ps 2178 drivers/usb/core/devio.c ps->disccontext.sival_ptr = ds.context; ps 2182 drivers/usb/core/devio.c static int proc_claiminterface(struct usb_dev_state *ps, void __user *arg) ps 2188 drivers/usb/core/devio.c return claimintf(ps, ifnum); ps 2191 drivers/usb/core/devio.c static int proc_releaseinterface(struct usb_dev_state *ps, void __user *arg) ps 2198 drivers/usb/core/devio.c ret = releaseintf(ps, ifnum); ps 2201 drivers/usb/core/devio.c destroy_async_on_interface(ps, ifnum); ps 2205 drivers/usb/core/devio.c static int proc_ioctl(struct usb_dev_state *ps, struct usbdevfs_ioctl *ctl) ps 2213 drivers/usb/core/devio.c if (ps->privileges_dropped) ps 2216 drivers/usb/core/devio.c if (!connected(ps)) ps 2235 drivers/usb/core/devio.c if (ps->dev->state != USB_STATE_CONFIGURED) ps 2237 drivers/usb/core/devio.c else if (!(intf = usb_ifnum_to_if(ps->dev, ctl->ifno))) ps 2283 drivers/usb/core/devio.c static int proc_ioctl_default(struct usb_dev_state *ps, void __user *arg) ps 2289 drivers/usb/core/devio.c return proc_ioctl(ps, &ctrl); ps 2293 drivers/usb/core/devio.c static int proc_ioctl_compat(struct usb_dev_state *ps, compat_uptr_t arg) ps 2303 drivers/usb/core/devio.c return proc_ioctl(ps, &ctrl); ps 2307 drivers/usb/core/devio.c static int proc_claim_port(struct usb_dev_state *ps, void __user *arg) ps 2314 drivers/usb/core/devio.c rc = usb_hub_claim_port(ps->dev, portnum, ps); ps 2316 drivers/usb/core/devio.c snoop(&ps->dev->dev, "port %d claimed by process %d: %s\n", ps 2321 drivers/usb/core/devio.c static int proc_release_port(struct usb_dev_state *ps, void __user *arg) ps 2327 drivers/usb/core/devio.c return usb_hub_release_port(ps->dev, portnum, ps); ps 2330 drivers/usb/core/devio.c static int proc_get_capabilities(struct usb_dev_state *ps, void __user *arg) ps 2338 drivers/usb/core/devio.c if (!ps->dev->bus->no_stop_on_short) ps 2340 drivers/usb/core/devio.c if (ps->dev->bus->sg_tablesize) ps 2349 drivers/usb/core/devio.c static int proc_disconnect_claim(struct usb_dev_state *ps, void __user *arg) ps 2357 
drivers/usb/core/devio.c intf = usb_ifnum_to_if(ps->dev, dc.interface); ps 2364 drivers/usb/core/devio.c if (ps->privileges_dropped) ps 2381 drivers/usb/core/devio.c return claimintf(ps, dc.interface); ps 2384 drivers/usb/core/devio.c static int proc_alloc_streams(struct usb_dev_state *ps, void __user *arg) ps 2391 drivers/usb/core/devio.c r = parse_usbdevfs_streams(ps, arg, &num_streams, &num_eps, ps 2396 drivers/usb/core/devio.c destroy_async_on_interface(ps, ps 2404 drivers/usb/core/devio.c static int proc_free_streams(struct usb_dev_state *ps, void __user *arg) ps 2411 drivers/usb/core/devio.c r = parse_usbdevfs_streams(ps, arg, NULL, &num_eps, &eps, &intf); ps 2415 drivers/usb/core/devio.c destroy_async_on_interface(ps, ps 2423 drivers/usb/core/devio.c static int proc_drop_privileges(struct usb_dev_state *ps, void __user *arg) ps 2434 drivers/usb/core/devio.c ps->interface_allowed_mask &= data; ps 2435 drivers/usb/core/devio.c ps->privileges_dropped = true; ps 2440 drivers/usb/core/devio.c static int proc_forbid_suspend(struct usb_dev_state *ps) ps 2444 drivers/usb/core/devio.c if (ps->suspend_allowed) { ps 2445 drivers/usb/core/devio.c ret = usb_autoresume_device(ps->dev); ps 2447 drivers/usb/core/devio.c ps->suspend_allowed = false; ps 2454 drivers/usb/core/devio.c static int proc_allow_suspend(struct usb_dev_state *ps) ps 2456 drivers/usb/core/devio.c if (!connected(ps)) ps 2459 drivers/usb/core/devio.c WRITE_ONCE(ps->not_yet_resumed, 1); ps 2460 drivers/usb/core/devio.c if (!ps->suspend_allowed) { ps 2461 drivers/usb/core/devio.c usb_autosuspend_device(ps->dev); ps 2462 drivers/usb/core/devio.c ps->suspend_allowed = true; ps 2467 drivers/usb/core/devio.c static int proc_wait_for_resume(struct usb_dev_state *ps) ps 2471 drivers/usb/core/devio.c usb_unlock_device(ps->dev); ps 2472 drivers/usb/core/devio.c ret = wait_event_interruptible(ps->wait_for_resume, ps 2473 drivers/usb/core/devio.c READ_ONCE(ps->not_yet_resumed) == 0); ps 2474 drivers/usb/core/devio.c usb_lock_device(ps->dev); ps 2478 drivers/usb/core/devio.c return proc_forbid_suspend(ps); ps 2489 drivers/usb/core/devio.c struct usb_dev_state *ps = file->private_data; ps 2491 drivers/usb/core/devio.c struct usb_device *dev = ps->dev; ps 2503 drivers/usb/core/devio.c ret = proc_reapurb(ps, p); ps 2508 drivers/usb/core/devio.c ret = proc_reapurbnonblock(ps, p); ps 2514 drivers/usb/core/devio.c ret = proc_reapurb_compat(ps, p); ps 2519 drivers/usb/core/devio.c ret = proc_reapurbnonblock_compat(ps, p); ps 2524 drivers/usb/core/devio.c if (!connected(ps)) { ps 2532 drivers/usb/core/devio.c ret = proc_control(ps, p); ps 2539 drivers/usb/core/devio.c ret = proc_bulk(ps, p); ps 2546 drivers/usb/core/devio.c ret = proc_resetep(ps, p); ps 2553 drivers/usb/core/devio.c ret = proc_resetdevice(ps); ps 2558 drivers/usb/core/devio.c ret = proc_clearhalt(ps, p); ps 2565 drivers/usb/core/devio.c ret = proc_getdriver(ps, p); ps 2570 drivers/usb/core/devio.c ret = proc_connectinfo(ps, p); ps 2575 drivers/usb/core/devio.c ret = proc_setintf(ps, p); ps 2580 drivers/usb/core/devio.c ret = proc_setconfig(ps, p); ps 2585 drivers/usb/core/devio.c ret = proc_submiturb(ps, p); ps 2593 drivers/usb/core/devio.c ret = proc_control_compat(ps, p); ps 2600 drivers/usb/core/devio.c ret = proc_bulk_compat(ps, p); ps 2607 drivers/usb/core/devio.c ret = proc_disconnectsignal_compat(ps, p); ps 2612 drivers/usb/core/devio.c ret = proc_submiturb_compat(ps, p); ps 2619 drivers/usb/core/devio.c ret = proc_ioctl_compat(ps, ptr_to_compat(p)); ps 2625 
drivers/usb/core/devio.c ret = proc_unlinkurb(ps, p); ps 2630 drivers/usb/core/devio.c ret = proc_disconnectsignal(ps, p); ps 2635 drivers/usb/core/devio.c ret = proc_claiminterface(ps, p); ps 2640 drivers/usb/core/devio.c ret = proc_releaseinterface(ps, p); ps 2645 drivers/usb/core/devio.c ret = proc_ioctl_default(ps, p); ps 2650 drivers/usb/core/devio.c ret = proc_claim_port(ps, p); ps 2655 drivers/usb/core/devio.c ret = proc_release_port(ps, p); ps 2658 drivers/usb/core/devio.c ret = proc_get_capabilities(ps, p); ps 2661 drivers/usb/core/devio.c ret = proc_disconnect_claim(ps, p); ps 2664 drivers/usb/core/devio.c ret = proc_alloc_streams(ps, p); ps 2667 drivers/usb/core/devio.c ret = proc_free_streams(ps, p); ps 2670 drivers/usb/core/devio.c ret = proc_drop_privileges(ps, p); ps 2673 drivers/usb/core/devio.c ret = ps->dev->speed; ps 2676 drivers/usb/core/devio.c ret = proc_forbid_suspend(ps); ps 2679 drivers/usb/core/devio.c ret = proc_allow_suspend(ps); ps 2682 drivers/usb/core/devio.c ret = proc_wait_for_resume(ps); ps 2689 drivers/usb/core/devio.c ret = proc_conninfo_ex(ps, p, _IOC_SIZE(cmd)); ps 2726 drivers/usb/core/devio.c struct usb_dev_state *ps = file->private_data; ps 2729 drivers/usb/core/devio.c poll_wait(file, &ps->wait, wait); ps 2730 drivers/usb/core/devio.c if (file->f_mode & FMODE_WRITE && !list_empty(&ps->async_completed)) ps 2732 drivers/usb/core/devio.c if (!connected(ps)) ps 2734 drivers/usb/core/devio.c if (list_empty(&ps->list)) ps 2755 drivers/usb/core/devio.c struct usb_dev_state *ps; ps 2760 drivers/usb/core/devio.c ps = list_entry(udev->filelist.next, struct usb_dev_state, list); ps 2761 drivers/usb/core/devio.c destroy_all_async(ps); ps 2762 drivers/usb/core/devio.c wake_up_all(&ps->wait); ps 2763 drivers/usb/core/devio.c WRITE_ONCE(ps->not_yet_resumed, 0); ps 2764 drivers/usb/core/devio.c wake_up_all(&ps->wait_for_resume); ps 2765 drivers/usb/core/devio.c list_del_init(&ps->list); ps 2766 drivers/usb/core/devio.c if (ps->discsignr) ps 2767 drivers/usb/core/devio.c kill_pid_usb_asyncio(ps->discsignr, EPIPE, ps->disccontext, ps 2768 drivers/usb/core/devio.c ps->disc_pid, ps->cred); ps 514 drivers/usb/host/ehci-dbg.c struct ehci_per_sched *ps; ps 576 drivers/usb/host/ehci-dbg.c list_for_each_entry(ps, &tt->ps_list, ps_list) { ps 579 drivers/usb/host/ehci-dbg.c dev_name(&ps->udev->dev), ps 580 drivers/usb/host/ehci-dbg.c ps->ep->desc.bEndpointAddress, ps 581 drivers/usb/host/ehci-dbg.c ps->tt_usecs, ps 582 drivers/usb/host/ehci-dbg.c ps->bw_phase, ps->phase_uf, ps 583 drivers/usb/host/ehci-dbg.c ps->bw_period, ps->cs_mask); ps 616 drivers/usb/host/ehci-dbg.c (scratch >> 8) & 0x000f, type, qh->ps.usecs, ps 617 drivers/usb/host/ehci-dbg.c qh->ps.c_usecs, temp, 0x7ff & (scratch >> 16)); ps 668 drivers/usb/host/ehci-dbg.c p.qh->ps.period, ps 717 drivers/usb/host/ehci-dbg.c p.sitd->stream->ps.period, ps 997 drivers/usb/host/ehci-hcd.c if (qh->ps.bw_uperiod) ps 1046 drivers/usb/host/ehci-hcd.c usb_settoggle(qh->ps.udev, epnum, is_out, 0); ps 99 drivers/usb/host/ehci-q.c if (unlikely(!usb_gettoggle(qh->ps.udev, epnum, is_out))) { ps 101 drivers/usb/host/ehci-q.c usb_settoggle(qh->ps.udev, epnum, is_out, 1); ps 808 drivers/usb/host/ehci-q.c qh->ps.usecs = NS_TO_US(usb_calc_bus_time(USB_SPEED_HIGH, ps 810 drivers/usb/host/ehci-q.c qh->ps.phase = NO_FRAME; ps 813 drivers/usb/host/ehci-q.c qh->ps.c_usecs = 0; ps 825 drivers/usb/host/ehci-q.c qh->ps.period = urb->interval >> 3; ps 832 drivers/usb/host/ehci-q.c qh->ps.bw_uperiod = min_t(unsigned, tmp, urb->interval); ps 833 
drivers/usb/host/ehci-q.c qh->ps.bw_period = qh->ps.bw_uperiod >> 3; ps 843 drivers/usb/host/ehci-q.c qh->ps.c_usecs = qh->ps.usecs + HS_USECS(0); ps 844 drivers/usb/host/ehci-q.c qh->ps.usecs = HS_USECS(1); ps 846 drivers/usb/host/ehci-q.c qh->ps.usecs += HS_USECS(1); ps 847 drivers/usb/host/ehci-q.c qh->ps.c_usecs = HS_USECS(0); ps 851 drivers/usb/host/ehci-q.c qh->ps.tt_usecs = NS_TO_US(think_time + ps 856 drivers/usb/host/ehci-q.c qh->ps.period = urb->interval; ps 864 drivers/usb/host/ehci-q.c qh->ps.bw_period = min_t(unsigned, tmp, urb->interval); ps 865 drivers/usb/host/ehci-q.c qh->ps.bw_uperiod = qh->ps.bw_period << 3; ps 870 drivers/usb/host/ehci-q.c qh->ps.udev = urb->dev; ps 871 drivers/usb/host/ehci-q.c qh->ps.ep = urb->ep; ps 195 drivers/usb/host/ehci-sched.c struct ehci_per_sched *ps) ps 197 drivers/usb/host/ehci-sched.c dev_dbg(&ps->udev->dev, ps 199 drivers/usb/host/ehci-sched.c ps->ep->desc.bEndpointAddress, ps 201 drivers/usb/host/ehci-sched.c (ps->bw_phase << 3) + ps->phase_uf, ps->bw_uperiod, ps 202 drivers/usb/host/ehci-sched.c ps->phase, ps->phase_uf, ps->period, ps 203 drivers/usb/host/ehci-sched.c ps->usecs, ps->c_usecs, ps->cs_mask); ps 211 drivers/usb/host/ehci-sched.c int usecs = qh->ps.usecs; ps 212 drivers/usb/host/ehci-sched.c int c_usecs = qh->ps.c_usecs; ps 213 drivers/usb/host/ehci-sched.c int tt_usecs = qh->ps.tt_usecs; ps 216 drivers/usb/host/ehci-sched.c if (qh->ps.phase == NO_FRAME) /* Bandwidth wasn't reserved */ ps 218 drivers/usb/host/ehci-sched.c start_uf = qh->ps.bw_phase << 3; ps 220 drivers/usb/host/ehci-sched.c bandwidth_dbg(ehci, sign, "intr", &qh->ps); ps 229 drivers/usb/host/ehci-sched.c for (i = start_uf + qh->ps.phase_uf; i < EHCI_BANDWIDTH_SIZE; ps 230 drivers/usb/host/ehci-sched.c i += qh->ps.bw_uperiod) ps 234 drivers/usb/host/ehci-sched.c if (qh->ps.c_usecs) { ps 237 drivers/usb/host/ehci-sched.c i += qh->ps.bw_uperiod) { ps 239 drivers/usb/host/ehci-sched.c if (qh->ps.cs_mask & m) ps 247 drivers/usb/host/ehci-sched.c tt = find_tt(qh->ps.udev); ps 249 drivers/usb/host/ehci-sched.c list_add_tail(&qh->ps.ps_list, &tt->ps_list); ps 251 drivers/usb/host/ehci-sched.c list_del(&qh->ps.ps_list); ps 254 drivers/usb/host/ehci-sched.c i += qh->ps.bw_period) ps 264 drivers/usb/host/ehci-sched.c struct ehci_per_sched *ps; ps 273 drivers/usb/host/ehci-sched.c list_for_each_entry(ps, &tt->ps_list, ps_list) { ps 274 drivers/usb/host/ehci-sched.c for (uframe = ps->bw_phase << 3; uframe < EHCI_BANDWIDTH_SIZE; ps 275 drivers/usb/host/ehci-sched.c uframe += ps->bw_uperiod) { ps 277 drivers/usb/host/ehci-sched.c x = ps->tt_usecs; ps 280 drivers/usb/host/ehci-sched.c for (uf = ps->phase_uf; uf < 8; ++uf) { ps 369 drivers/usb/host/ehci-sched.c struct ehci_per_sched *ps, ps 375 drivers/usb/host/ehci-sched.c unsigned period = ps->bw_period; ps 376 drivers/usb/host/ehci-sched.c unsigned usecs = ps->tt_usecs; ps 457 drivers/usb/host/ehci-sched.c if (same_tt(dev, here.qh->ps.udev)) { ps 537 drivers/usb/host/ehci-sched.c unsigned period = qh->ps.period; ps 539 drivers/usb/host/ehci-sched.c dev_dbg(&qh->ps.udev->dev, ps 543 drivers/usb/host/ehci-sched.c qh, qh->ps.phase, qh->ps.usecs, qh->ps.c_usecs); ps 549 drivers/usb/host/ehci-sched.c for (i = qh->ps.phase; i < ehci->periodic_size; i += period) { ps 569 drivers/usb/host/ehci-sched.c if (qh->ps.period > here.qh->ps.period) ps 590 drivers/usb/host/ehci-sched.c ehci_to_hcd(ehci)->self.bandwidth_allocated += qh->ps.bw_period ps 591 drivers/usb/host/ehci-sched.c ? 
((qh->ps.usecs + qh->ps.c_usecs) / qh->ps.bw_period) ps 592 drivers/usb/host/ehci-sched.c : (qh->ps.usecs * 8); ps 622 drivers/usb/host/ehci-sched.c period = qh->ps.period ? : 1; ps 624 drivers/usb/host/ehci-sched.c for (i = qh->ps.phase; i < ehci->periodic_size; i += period) ps 628 drivers/usb/host/ehci-sched.c ehci_to_hcd(ehci)->self.bandwidth_allocated -= qh->ps.bw_period ps 629 drivers/usb/host/ehci-sched.c ? ((qh->ps.usecs + qh->ps.c_usecs) / qh->ps.bw_period) ps 630 drivers/usb/host/ehci-sched.c : (qh->ps.usecs * 8); ps 632 drivers/usb/host/ehci-sched.c dev_dbg(&qh->ps.udev->dev, ps 634 drivers/usb/host/ehci-sched.c qh->ps.period, ps 636 drivers/usb/host/ehci-sched.c qh, qh->ps.phase, qh->ps.usecs, qh->ps.c_usecs); ps 793 drivers/usb/host/ehci-sched.c if (qh->ps.c_usecs && uframe >= 6) /* FSTN territory? */ ps 796 drivers/usb/host/ehci-sched.c if (!check_period(ehci, frame, uframe, qh->ps.bw_uperiod, qh->ps.usecs)) ps 798 drivers/usb/host/ehci-sched.c if (!qh->ps.c_usecs) { ps 805 drivers/usb/host/ehci-sched.c if (tt_available(ehci, &qh->ps, tt, frame, uframe)) { ps 811 drivers/usb/host/ehci-sched.c qh->ps.bw_uperiod, qh->ps.c_usecs)) ps 832 drivers/usb/host/ehci-sched.c if (tt_no_collision(ehci, qh->ps.bw_period, qh->ps.udev, frame, mask)) { ps 834 drivers/usb/host/ehci-sched.c qh->ps.bw_uperiod, qh->ps.c_usecs)) ps 837 drivers/usb/host/ehci-sched.c qh->ps.bw_uperiod, qh->ps.c_usecs)) ps 860 drivers/usb/host/ehci-sched.c if (qh->ps.phase != NO_FRAME) { ps 867 drivers/usb/host/ehci-sched.c tt = find_tt(qh->ps.udev); ps 878 drivers/usb/host/ehci-sched.c if (qh->ps.bw_period) { ps 882 drivers/usb/host/ehci-sched.c for (i = qh->ps.bw_period; i > 0; --i) { ps 883 drivers/usb/host/ehci-sched.c frame = ++ehci->random_frame & (qh->ps.bw_period - 1); ps 900 drivers/usb/host/ehci-sched.c qh->ps.phase = (qh->ps.period ? ehci->random_frame & ps 901 drivers/usb/host/ehci-sched.c (qh->ps.period - 1) : 0); ps 902 drivers/usb/host/ehci-sched.c qh->ps.bw_phase = qh->ps.phase & (qh->ps.bw_period - 1); ps 903 drivers/usb/host/ehci-sched.c qh->ps.phase_uf = uframe; ps 904 drivers/usb/host/ehci-sched.c qh->ps.cs_mask = qh->ps.period ? 
ps 910 drivers/usb/host/ehci-sched.c hw->hw_info2 |= cpu_to_hc32(ehci, qh->ps.cs_mask); ps 1024 drivers/usb/host/ehci-sched.c stream->ps.phase = NO_FRAME; ps 1069 drivers/usb/host/ehci-sched.c stream->ps.usecs = HS_USECS_ISO(maxp); ps 1076 drivers/usb/host/ehci-sched.c stream->ps.bw_uperiod = min_t(unsigned, tmp, urb->interval); ps 1079 drivers/usb/host/ehci-sched.c stream->ps.period = urb->interval >> 3; ps 1080 drivers/usb/host/ehci-sched.c stream->bandwidth = stream->ps.usecs * 8 / ps 1081 drivers/usb/host/ehci-sched.c stream->ps.bw_uperiod; ps 1095 drivers/usb/host/ehci-sched.c stream->ps.usecs = HS_USECS_ISO(maxp); ps 1097 drivers/usb/host/ehci-sched.c stream->ps.tt_usecs = NS_TO_US(think_time + usb_calc_bus_time( ps 1104 drivers/usb/host/ehci-sched.c stream->ps.c_usecs = stream->ps.usecs; ps 1105 drivers/usb/host/ehci-sched.c stream->ps.usecs = HS_USECS_ISO(1); ps 1106 drivers/usb/host/ehci-sched.c stream->ps.cs_mask = 1; ps 1110 drivers/usb/host/ehci-sched.c stream->ps.cs_mask |= tmp << (8 + 2); ps 1112 drivers/usb/host/ehci-sched.c stream->ps.cs_mask = smask_out[hs_transfers - 1]; ps 1119 drivers/usb/host/ehci-sched.c stream->ps.bw_period = min_t(unsigned, tmp, urb->interval); ps 1120 drivers/usb/host/ehci-sched.c stream->ps.bw_uperiod = stream->ps.bw_period << 3; ps 1122 drivers/usb/host/ehci-sched.c stream->ps.period = urb->interval; ps 1124 drivers/usb/host/ehci-sched.c stream->bandwidth = (stream->ps.usecs + stream->ps.c_usecs) / ps 1125 drivers/usb/host/ehci-sched.c stream->ps.bw_period; ps 1131 drivers/usb/host/ehci-sched.c stream->ps.udev = dev; ps 1132 drivers/usb/host/ehci-sched.c stream->ps.ep = urb->ep; ps 1322 drivers/usb/host/ehci-sched.c int usecs = stream->ps.usecs; ps 1323 drivers/usb/host/ehci-sched.c int c_usecs = stream->ps.c_usecs; ps 1324 drivers/usb/host/ehci-sched.c int tt_usecs = stream->ps.tt_usecs; ps 1327 drivers/usb/host/ehci-sched.c if (stream->ps.phase == NO_FRAME) /* Bandwidth wasn't reserved */ ps 1329 drivers/usb/host/ehci-sched.c uframe = stream->ps.bw_phase << 3; ps 1331 drivers/usb/host/ehci-sched.c bandwidth_dbg(ehci, sign, "iso", &stream->ps); ps 1340 drivers/usb/host/ehci-sched.c for (i = uframe + stream->ps.phase_uf; i < EHCI_BANDWIDTH_SIZE; ps 1341 drivers/usb/host/ehci-sched.c i += stream->ps.bw_uperiod) ps 1345 drivers/usb/host/ehci-sched.c s_mask = stream->ps.cs_mask; ps 1350 drivers/usb/host/ehci-sched.c i += stream->ps.bw_uperiod) { ps 1351 drivers/usb/host/ehci-sched.c for ((j = stream->ps.phase_uf, m = 1 << j); j < 8; ps 1360 drivers/usb/host/ehci-sched.c tt = find_tt(stream->ps.udev); ps 1362 drivers/usb/host/ehci-sched.c list_add_tail(&stream->ps.ps_list, &tt->ps_list); ps 1364 drivers/usb/host/ehci-sched.c list_del(&stream->ps.ps_list); ps 1367 drivers/usb/host/ehci-sched.c i += stream->ps.bw_period) ps 1382 drivers/usb/host/ehci-sched.c usecs = ehci->uframe_periodic_max - stream->ps.usecs; ps 1384 drivers/usb/host/ehci-sched.c for (uframe &= stream->ps.bw_uperiod - 1; uframe < EHCI_BANDWIDTH_SIZE; ps 1385 drivers/usb/host/ehci-sched.c uframe += stream->ps.bw_uperiod) { ps 1404 drivers/usb/host/ehci-sched.c mask = stream->ps.cs_mask << (uframe & 7); ps 1407 drivers/usb/host/ehci-sched.c if (((stream->ps.cs_mask & 0xff) << (uframe & 7)) >= (1 << 7)) ps 1415 drivers/usb/host/ehci-sched.c uframe &= stream->ps.bw_uperiod - 1; ps 1423 drivers/usb/host/ehci-sched.c if (!tt_available(ehci, &stream->ps, tt, frame, uf)) ps 1429 drivers/usb/host/ehci-sched.c if (!tt_no_collision(ehci, stream->ps.bw_period, ps 1430 
drivers/usb/host/ehci-sched.c stream->ps.udev, frame, mask)) ps 1440 drivers/usb/host/ehci-sched.c max_used = ehci->uframe_periodic_max - stream->ps.usecs; ps 1441 drivers/usb/host/ehci-sched.c for (tmp = stream->ps.cs_mask & 0xff; tmp; tmp >>= 1, uf++) { ps 1447 drivers/usb/host/ehci-sched.c if (stream->ps.c_usecs) { ps 1449 drivers/usb/host/ehci-sched.c stream->ps.c_usecs; ps 1453 drivers/usb/host/ehci-sched.c if ((stream->ps.cs_mask & tmp) == 0) ps 1460 drivers/usb/host/ehci-sched.c uframe += stream->ps.bw_uperiod; ps 1463 drivers/usb/host/ehci-sched.c stream->ps.cs_mask <<= uframe & 7; ps 1464 drivers/usb/host/ehci-sched.c stream->splits = cpu_to_hc32(ehci, stream->ps.cs_mask); ps 1504 drivers/usb/host/ehci-sched.c if (stream->ps.phase == NO_FRAME) { ps 1506 drivers/usb/host/ehci-sched.c struct ehci_tt *tt = find_tt(stream->ps.udev); ps 1544 drivers/usb/host/ehci-sched.c stream->ps.phase = (start >> 3) & ps 1545 drivers/usb/host/ehci-sched.c (stream->ps.period - 1); ps 1546 drivers/usb/host/ehci-sched.c stream->ps.bw_phase = stream->ps.phase & ps 1547 drivers/usb/host/ehci-sched.c (stream->ps.bw_period - 1); ps 1548 drivers/usb/host/ehci-sched.c stream->ps.phase_uf = start & 7; ps 1554 drivers/usb/host/ehci-sched.c start = (stream->ps.phase << 3) + stream->ps.phase_uf; ps 2003 drivers/usb/host/ehci-sched.c iso_sched->span = urb->number_of_packets * stream->ps.period; ps 2315 drivers/usb/host/ehci-sched.c if (urb->interval != stream->ps.period) { ps 2317 drivers/usb/host/ehci-sched.c stream->ps.period, urb->interval); ps 413 drivers/usb/host/ehci.h struct ehci_per_sched ps; /* scheduling info */ ps 480 drivers/usb/host/ehci.h struct ehci_per_sched ps; /* scheduling info */ ps 2053 drivers/video/fbdev/amifb.c u_long pl, ps; ps 2058 drivers/video/fbdev/amifb.c ps = pl = ZTWO_PADDR(dummysprite); ps 2073 drivers/video/fbdev/amifb.c ps = ZTWO_PADDR(shfsprite); ps 2079 drivers/video/fbdev/amifb.c swap(pl, ps); ps 2092 drivers/video/fbdev/amifb.c cops[cop_spr0ptrh].w[1] = highw(ps); ps 2093 drivers/video/fbdev/amifb.c cops[cop_spr0ptrl].w[1] = loww(ps); ps 616 drivers/video/fbdev/omap/hwa742.c static unsigned long round_to_extif_ticks(unsigned long ps, int div) ps 619 drivers/video/fbdev/omap/hwa742.c return (ps + bus_tick - 1) / bus_tick * bus_tick; ps 112 drivers/video/fbdev/omap/sossi.c static u32 ps_to_sossi_ticks(u32 ps, int div) ps 115 drivers/video/fbdev/omap/sossi.c return (clk_period + ps - 1) / clk_period; ps 1840 drivers/video/fbdev/omap2/omapfb/dss/dispc.c static s32 pixinc(int pixels, u8 ps) ps 1845 drivers/video/fbdev/omap2/omapfb/dss/dispc.c return 1 + (pixels - 1) * ps; ps 1847 drivers/video/fbdev/omap2/omapfb/dss/dispc.c return 1 - (-pixels + 1) * ps; ps 1861 drivers/video/fbdev/omap2/omapfb/dss/dispc.c u8 ps; ps 1873 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps = 4; ps 1876 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps = color_mode_to_bpp(color_mode) / 8; ps 1902 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = field_offset * screen_width * ps; ps 1908 drivers/video/fbdev/omap2/omapfb/dss/dispc.c (fieldmode ? screen_width : 0), ps); ps 1909 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, ps); ps 1925 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = field_offset * screen_width * ps; ps 1930 drivers/video/fbdev/omap2/omapfb/dss/dispc.c (fieldmode ? 
screen_width : 0), ps); ps 1931 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, ps); ps 1948 drivers/video/fbdev/omap2/omapfb/dss/dispc.c u8 ps; ps 1960 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps = color_mode_to_bpp(color_mode) / 8; ps 1985 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 + field_offset * screen_width * ps; ps 1990 drivers/video/fbdev/omap2/omapfb/dss/dispc.c (fieldmode ? screen_width : 0), ps); ps 1993 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, 2 * ps); ps 1995 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, ps); ps 1998 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset1 = screen_width * (fbh - 1) * ps; ps 2000 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 + field_offset * ps; ps 2004 drivers/video/fbdev/omap2/omapfb/dss/dispc.c y_predecim + (fieldmode ? 1 : 0), ps); ps 2005 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(-x_predecim * screen_width, ps); ps 2008 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset1 = (screen_width * (fbh - 1) + fbw - 1) * ps; ps 2010 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 - field_offset * screen_width * ps; ps 2015 drivers/video/fbdev/omap2/omapfb/dss/dispc.c (fieldmode ? screen_width : 0), ps); ps 2018 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(-x_predecim, 2 * ps); ps 2020 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(-x_predecim, ps); ps 2023 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset1 = (fbw - 1) * ps; ps 2025 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 - field_offset * ps; ps 2029 drivers/video/fbdev/omap2/omapfb/dss/dispc.c y_predecim - (fieldmode ? 1 : 0), ps); ps 2030 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim * screen_width, ps); ps 2035 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset1 = (fbw - 1) * ps; ps 2037 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 + field_offset * screen_width * ps; ps 2042 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps); ps 2045 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(-x_predecim, 2 * ps); ps 2047 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(-x_predecim, ps); ps 2053 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 + field_offset * ps; ps 2058 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps); ps 2059 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim * screen_width, ps); ps 2063 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset1 = screen_width * (fbh - 1) * ps; ps 2065 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 - field_offset * screen_width * ps; ps 2070 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps); ps 2073 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, 2 * ps); ps 2075 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, ps); ps 2079 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset1 = (screen_width * (fbh - 1) + fbw - 1) * ps; ps 2081 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 - field_offset * ps; ps 2086 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps); ps 2087 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(-x_predecim * screen_width, ps); ps 2101 drivers/video/fbdev/omap2/omapfb/dss/dispc.c u8 ps; ps 2111 drivers/video/fbdev/omap2/omapfb/dss/dispc.c ps = color_mode_to_bpp(color_mode) / 8; ps 2123 
drivers/video/fbdev/omap2/omapfb/dss/dispc.c *offset0 = *offset1 + field_offset * screen_width * ps; ps 2127 drivers/video/fbdev/omap2/omapfb/dss/dispc.c (fieldmode ? screen_width : 0), ps); ps 2130 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, 2 * ps); ps 2132 drivers/video/fbdev/omap2/omapfb/dss/dispc.c *pix_inc = pixinc(x_predecim, ps); ps 76 drivers/watchdog/pic32-wdt.c u32 period, ps, terminal; ps 89 drivers/watchdog/pic32-wdt.c ps = pic32_wdt_get_post_scaler(wdt); ps 90 drivers/watchdog/pic32-wdt.c terminal = BIT(ps); ps 586 fs/fat/namei_vfat.c struct msdos_dir_slot *ps; ps 625 fs/fat/namei_vfat.c for (ps = slots, i = *nr_slots; i > 0; i--, ps++) { ps 626 fs/fat/namei_vfat.c ps->id = i; ps 627 fs/fat/namei_vfat.c ps->attr = ATTR_EXT; ps 628 fs/fat/namei_vfat.c ps->reserved = 0; ps 629 fs/fat/namei_vfat.c ps->alias_checksum = cksum; ps 630 fs/fat/namei_vfat.c ps->start = 0; ps 632 fs/fat/namei_vfat.c fatwchar_to16(ps->name0_4, uname + offset, 5); ps 633 fs/fat/namei_vfat.c fatwchar_to16(ps->name5_10, uname + offset + 5, 6); ps 634 fs/fat/namei_vfat.c fatwchar_to16(ps->name11_12, uname + offset + 11, 2); ps 637 fs/fat/namei_vfat.c de = (struct msdos_dir_entry *)ps; ps 1151 fs/hugetlbfs/inode.c unsigned long ps; ps 1193 fs/hugetlbfs/inode.c ps = memparse(param->string, &rest); ps 1194 fs/hugetlbfs/inode.c ctx->hstate = size_to_hstate(ps); ps 1196 fs/hugetlbfs/inode.c pr_err("Unsupported page size %lu MB\n", ps >> 20); ps 62 fs/pstore/inode.c struct pstore_private *ps = s->private; ps 69 fs/pstore/inode.c data->off = ps->total_size % REC_SIZE; ps 71 fs/pstore/inode.c if (data->off + REC_SIZE > ps->total_size) { ps 87 fs/pstore/inode.c struct pstore_private *ps = s->private; ps 92 fs/pstore/inode.c if (data->off + REC_SIZE > ps->total_size) ps 100 fs/pstore/inode.c struct pstore_private *ps = s->private; ps 107 fs/pstore/inode.c rec = (struct pstore_ftrace_record *)(ps->record->buf + data->off); ps 129 fs/pstore/inode.c struct pstore_private *ps = sf->private; ps 131 fs/pstore/inode.c if (ps->record->type == PSTORE_TYPE_FTRACE) ps 134 fs/pstore/inode.c ps->record->buf, ps->total_size); ps 139 fs/pstore/inode.c struct pstore_private *ps = inode->i_private; ps 144 fs/pstore/inode.c if (ps->record->type == PSTORE_TYPE_FTRACE) ps 152 fs/pstore/inode.c sf->private = ps; ps 182 include/linux/cdrom.h __u8 ps : 1; ps 200 include/linux/cdrom.h __u8 ps : 1; ps 62 include/linux/ceph/rados.h __le16 ps; /* placement seed */ ps 39 include/linux/mtd/nand.h #define NAND_MEMORG(bpc, ps, os, ppe, epl, mbb, ppl, lpt, nt) \ ps 42 include/linux/mtd/nand.h .pagesize = (ps), \ ps 115 include/linux/padata.h struct padata_shell *ps; ps 184 include/linux/padata.h extern void padata_free_shell(struct padata_shell *ps); ps 185 include/linux/padata.h extern int padata_do_parallel(struct padata_shell *ps, ps 1781 include/linux/pci.h int pci_enable_ats(struct pci_dev *dev, int ps); ps 1786 include/linux/pci.h static inline int pci_enable_ats(struct pci_dev *d, int ps) { return -ENODEV; } ps 4941 include/net/cfg80211.h bool ps; ps 649 include/net/mac80211.h bool ps; ps 134 include/rdma/rdma_cm.h enum rdma_ucm_port_space ps; ps 141 include/rdma/rdma_cm.h void *context, enum rdma_ucm_port_space ps, ps 161 include/rdma/rdma_cm.h #define rdma_create_id(net, event_handler, context, ps, qp_type) \ ps 162 include/rdma/rdma_cm.h __rdma_create_id((net), (event_handler), (context), (ps), (qp_type), \ ps 24 include/sound/sof/xtensa.h uint32_t ps; ps 29 include/uapi/linux/coff.h #define COFF_SHORT_L(ps) 
((short)(((unsigned short)((unsigned char)ps[1])<<8)|\ ps 30 include/uapi/linux/coff.h ((unsigned short)((unsigned char)ps[0])))) ps 33 include/uapi/linux/coff.h #define COFF_LONG_L(ps) (((long)(((unsigned long)((unsigned char)ps[3])<<24) |\ ps 34 include/uapi/linux/coff.h ((unsigned long)((unsigned char)ps[2])<<16) |\ ps 35 include/uapi/linux/coff.h ((unsigned long)((unsigned char)ps[1])<<8) |\ ps 36 include/uapi/linux/coff.h ((unsigned long)((unsigned char)ps[0]))))) ps 39 include/uapi/linux/coff.h #define COFF_SHORT_H(ps) ((short)(((unsigned short)((unsigned char)ps[0])<<8)|\ ps 40 include/uapi/linux/coff.h ((unsigned short)((unsigned char)ps[1])))) ps 43 include/uapi/linux/coff.h #define COFF_LONG_H(ps) (((long)(((unsigned long)((unsigned char)ps[0])<<24) |\ ps 44 include/uapi/linux/coff.h ((unsigned long)((unsigned char)ps[1])<<16) |\ ps 45 include/uapi/linux/coff.h ((unsigned long)((unsigned char)ps[2])<<8) |\ ps 46 include/uapi/linux/coff.h ((unsigned long)((unsigned char)ps[3]))))) ps 93 include/uapi/rdma/rdma_user_cm.h __u16 ps; /* use enum rdma_ucm_port_space */ ps 1080 kernel/futex.c struct futex_pi_state **ps) ps 1176 kernel/futex.c *ps = pi_state; ps 1287 kernel/futex.c struct futex_pi_state **ps, ps 1371 kernel/futex.c *ps = pi_state; ps 1378 kernel/futex.c union futex_key *key, struct futex_pi_state **ps, ps 1388 kernel/futex.c return attach_to_pi_state(uaddr, uval, top_waiter->pi_state, ps); ps 1394 kernel/futex.c return attach_to_pi_owner(uaddr, uval, key, ps, exiting); ps 1439 kernel/futex.c struct futex_pi_state **ps, ps 1473 kernel/futex.c return attach_to_pi_state(uaddr, uval, top_waiter->pi_state, ps); ps 1512 kernel/futex.c return attach_to_pi_owner(uaddr, newval, key, ps, exiting); ps 1951 kernel/futex.c union futex_key *key2, struct futex_pi_state **ps, ps 1988 kernel/futex.c ret = futex_lock_pi_atomic(pifutex, hb2, key2, ps, top_waiter->task, ps 103 kernel/padata.c int padata_do_parallel(struct padata_shell *ps, ps 106 kernel/padata.c struct padata_instance *pinst = ps->pinst; ps 113 kernel/padata.c pd = rcu_dereference_bh(ps->pd); ps 216 kernel/padata.c struct padata_instance *pinst = pd->ps->pinst; ps 429 kernel/padata.c static struct parallel_data *padata_alloc_pd(struct padata_shell *ps) ps 431 kernel/padata.c struct padata_instance *pinst = ps->pinst; ps 451 kernel/padata.c pd->ps = ps; ps 501 kernel/padata.c static int padata_replace_one(struct padata_shell *ps) ps 505 kernel/padata.c pd_new = padata_alloc_pd(ps); ps 509 kernel/padata.c ps->opd = rcu_dereference_protected(ps->pd, 1); ps 510 kernel/padata.c rcu_assign_pointer(ps->pd, pd_new); ps 518 kernel/padata.c struct padata_shell *ps; ps 535 kernel/padata.c list_for_each_entry(ps, &pinst->pslist, list) { ps 536 kernel/padata.c err = padata_replace_one(ps); ps 543 kernel/padata.c list_for_each_entry_continue_reverse(ps, &pinst->pslist, list) ps 544 kernel/padata.c if (atomic_dec_and_test(&ps->opd->refcnt)) ps 545 kernel/padata.c padata_free_pd(ps->opd); ps 1086 kernel/padata.c struct padata_shell *ps; ps 1088 kernel/padata.c ps = kzalloc(sizeof(*ps), GFP_KERNEL); ps 1089 kernel/padata.c if (!ps) ps 1092 kernel/padata.c ps->pinst = pinst; ps 1095 kernel/padata.c pd = padata_alloc_pd(ps); ps 1102 kernel/padata.c RCU_INIT_POINTER(ps->pd, pd); ps 1103 kernel/padata.c list_add(&ps->list, &pinst->pslist); ps 1106 kernel/padata.c return ps; ps 1109 kernel/padata.c kfree(ps); ps 1120 kernel/padata.c void padata_free_shell(struct padata_shell *ps) ps 1122 kernel/padata.c struct padata_instance *pinst = ps->pinst; 
ps 1125 kernel/padata.c list_del(&ps->list); ps 1126 kernel/padata.c padata_free_pd(rcu_dereference_protected(ps->pd, 1)); ps 1129 kernel/padata.c kfree(ps); ps 897 mm/memory-failure.c static int page_action(struct page_state *ps, struct page *p, ps 903 mm/memory-failure.c result = ps->action(p, pfn); ps 906 mm/memory-failure.c if (ps->action == me_swapcache_dirty && result == MF_DELAYED) ps 910 mm/memory-failure.c pfn, action_page_types[ps->type], count); ps 913 mm/memory-failure.c action_result(pfn, ps->type, result); ps 1064 mm/memory-failure.c struct page_state *ps; ps 1071 mm/memory-failure.c for (ps = error_states;; ps++) ps 1072 mm/memory-failure.c if ((p->flags & ps->mask) == ps->res) ps 1077 mm/memory-failure.c if (!ps->mask) ps 1078 mm/memory-failure.c for (ps = error_states;; ps++) ps 1079 mm/memory-failure.c if ((page_flags & ps->mask) == ps->res) ps 1081 mm/memory-failure.c return page_action(ps, p, pfn); ps 140 net/l2tp/l2tp_ppp.c struct pppol2tp_session *ps = l2tp_session_priv(session); ps 144 net/l2tp/l2tp_ppp.c sk = rcu_dereference(ps->sk); ps 213 net/l2tp/l2tp_ppp.c struct pppol2tp_session *ps = l2tp_session_priv(session); ps 220 net/l2tp/l2tp_ppp.c sk = rcu_dereference(ps->sk); ps 405 net/l2tp/l2tp_ppp.c struct pppol2tp_session *ps; ps 407 net/l2tp/l2tp_ppp.c ps = container_of(head, typeof(*ps), rcu); ps 408 net/l2tp/l2tp_ppp.c sock_put(ps->__sk); ps 453 net/l2tp/l2tp_ppp.c struct pppol2tp_session *ps; ps 457 net/l2tp/l2tp_ppp.c ps = l2tp_session_priv(session); ps 458 net/l2tp/l2tp_ppp.c mutex_lock(&ps->sk_lock); ps 459 net/l2tp/l2tp_ppp.c ps->__sk = rcu_dereference_protected(ps->sk, ps 460 net/l2tp/l2tp_ppp.c lockdep_is_held(&ps->sk_lock)); ps 461 net/l2tp/l2tp_ppp.c RCU_INIT_POINTER(ps->sk, NULL); ps 462 net/l2tp/l2tp_ppp.c mutex_unlock(&ps->sk_lock); ps 463 net/l2tp/l2tp_ppp.c call_rcu(&ps->rcu, pppol2tp_put_sk); ps 548 net/l2tp/l2tp_ppp.c struct pppol2tp_session *ps; ps 554 net/l2tp/l2tp_ppp.c ps = l2tp_session_priv(session); ps 555 net/l2tp/l2tp_ppp.c mutex_init(&ps->sk_lock); ps 556 net/l2tp/l2tp_ppp.c ps->owner = current->pid; ps 669 net/l2tp/l2tp_ppp.c struct pppol2tp_session *ps; ps 762 net/l2tp/l2tp_ppp.c ps = l2tp_session_priv(session); ps 767 net/l2tp/l2tp_ppp.c mutex_lock(&ps->sk_lock); ps 768 net/l2tp/l2tp_ppp.c if (rcu_dereference_protected(ps->sk, ps 769 net/l2tp/l2tp_ppp.c lockdep_is_held(&ps->sk_lock)) || ps 770 net/l2tp/l2tp_ppp.c ps->__sk) { ps 771 net/l2tp/l2tp_ppp.c mutex_unlock(&ps->sk_lock); ps 787 net/l2tp/l2tp_ppp.c ps = l2tp_session_priv(session); ps 790 net/l2tp/l2tp_ppp.c mutex_lock(&ps->sk_lock); ps 793 net/l2tp/l2tp_ppp.c mutex_unlock(&ps->sk_lock); ps 824 net/l2tp/l2tp_ppp.c mutex_unlock(&ps->sk_lock); ps 831 net/l2tp/l2tp_ppp.c rcu_assign_pointer(ps->sk, sk); ps 832 net/l2tp/l2tp_ppp.c mutex_unlock(&ps->sk_lock); ps 1193 net/mac80211/cfg.c local->total_ps_buffered -= skb_queue_len(&sdata->u.ap.ps.bc_buf); ps 1194 net/mac80211/cfg.c ieee80211_purge_tx_queue(&local->hw, &sdata->u.ap.ps.bc_buf); ps 474 net/mac80211/debugfs_netdev.c IEEE80211_IF_FILE(num_sta_ps, u.ap.ps.num_sta_ps, ATOMIC); ps 475 net/mac80211/debugfs_netdev.c IEEE80211_IF_FILE(dtim_count, u.ap.ps.dtim_count, DEC); ps 482 net/mac80211/debugfs_netdev.c skb_queue_len(&sdata->u.ap.ps.bc_buf)); ps 293 net/mac80211/ieee80211_i.h struct ps_data ps; ps 706 net/mac80211/ieee80211_i.h struct ps_data ps; ps 801 net/mac80211/iface.c struct ps_data *ps; ps 922 net/mac80211/iface.c ps = &sdata->bss->ps; ps 924 net/mac80211/iface.c spin_lock_irqsave(&ps->bc_buf.lock, flags); ps 925 
net/mac80211/iface.c skb_queue_walk_safe(&ps->bc_buf, skb, tmp) { ps 927 net/mac80211/iface.c __skb_unlink(skb, &ps->bc_buf); ps 932 net/mac80211/iface.c spin_unlock_irqrestore(&ps->bc_buf.lock, flags); ps 1428 net/mac80211/iface.c skb_queue_head_init(&sdata->u.ap.ps.bc_buf); ps 1007 net/mac80211/mesh.c local->total_ps_buffered -= skb_queue_len(&ifmsh->ps.bc_buf); ps 1008 net/mac80211/mesh.c skb_queue_purge(&ifmsh->ps.bc_buf); ps 1538 net/mac80211/mesh.c skb_queue_head_init(&ifmsh->ps.bc_buf); ps 263 net/mac80211/mesh_ps.c atomic_inc(&sta->sdata->u.mesh.ps.num_sta_ps); ps 1715 net/mac80211/mlme.c if (sdata->vif.bss_conf.ps != ps_allowed) { ps 1716 net/mac80211/mlme.c sdata->vif.bss_conf.ps = ps_allowed; ps 4600 net/mac80211/mlme.c ifmgd->powersave = sdata->wdev.ps; ps 1565 net/mac80211/rx.c struct ps_data *ps; ps 1570 net/mac80211/rx.c ps = &sdata->bss->ps; ps 1574 net/mac80211/rx.c atomic_inc(&ps->num_sta_ps); ps 90 net/mac80211/sta_info.c struct ps_data *ps; ps 97 net/mac80211/sta_info.c ps = &sdata->bss->ps; ps 99 net/mac80211/sta_info.c ps = &sdata->u.mesh.ps; ps 107 net/mac80211/sta_info.c atomic_dec(&ps->num_sta_ps); ps 764 net/mac80211/sta_info.c struct ps_data *ps; ps 775 net/mac80211/sta_info.c ps = &sta->sdata->bss->ps; ps 778 net/mac80211/sta_info.c ps = &sta->sdata->u.mesh.ps; ps 825 net/mac80211/sta_info.c if (indicate_tim == __bss_tim_get(ps->tim, id)) ps 829 net/mac80211/sta_info.c __bss_tim_set(ps->tim, id); ps 831 net/mac80211/sta_info.c __bss_tim_clear(ps->tim, id); ps 1217 net/mac80211/sta_info.c atomic_dec(&sdata->u.mesh.ps.num_sta_ps); ps 1278 net/mac80211/sta_info.c struct ps_data *ps; ps 1285 net/mac80211/sta_info.c ps = &sdata->bss->ps; ps 1287 net/mac80211/sta_info.c ps = &sdata->u.mesh.ps; ps 1341 net/mac80211/sta_info.c atomic_dec(&ps->num_sta_ps); ps 422 net/mac80211/trace.h __field(bool, ps); ps 462 net/mac80211/trace.h __entry->ps = info->ps; ps 361 net/mac80211/tx.c struct ps_data *ps; ps 364 net/mac80211/tx.c ps = &sdata->u.ap.ps; ps 366 net/mac80211/tx.c ps = &sdata->u.mesh.ps; ps 370 net/mac80211/tx.c skb = skb_dequeue(&ps->bc_buf); ps 375 net/mac80211/tx.c total += skb_queue_len(&ps->bc_buf); ps 405 net/mac80211/tx.c struct ps_data *ps; ps 421 net/mac80211/tx.c ps = &tx->sdata->bss->ps; ps 423 net/mac80211/tx.c ps = &tx->sdata->u.mesh.ps; ps 440 net/mac80211/tx.c if (!atomic_read(&ps->num_sta_ps) && skb_queue_empty(&ps->bc_buf)) ps 453 net/mac80211/tx.c if (skb_queue_len(&ps->bc_buf) >= AP_MAX_BC_BUFFER) { ps 456 net/mac80211/tx.c ieee80211_free_txskb(&tx->local->hw, skb_dequeue(&ps->bc_buf)); ps 460 net/mac80211/tx.c skb_queue_tail(&ps->bc_buf, tx->skb); ps 4210 net/mac80211/tx.c struct ps_data *ps, struct sk_buff *skb, ps 4219 net/mac80211/tx.c if (atomic_read(&ps->num_sta_ps) > 0) ps 4222 net/mac80211/tx.c have_bits = !bitmap_empty((unsigned long *)ps->tim, ps 4225 net/mac80211/tx.c if (ps->dtim_count == 0) ps 4226 net/mac80211/tx.c ps->dtim_count = sdata->vif.bss_conf.dtim_period - 1; ps 4228 net/mac80211/tx.c ps->dtim_count--; ps 4234 net/mac80211/tx.c *pos++ = ps->dtim_count; ps 4237 net/mac80211/tx.c if (ps->dtim_count == 0 && !skb_queue_empty(&ps->bc_buf)) ps 4240 net/mac80211/tx.c ps->dtim_bc_mc = aid0 == 1; ps 4248 net/mac80211/tx.c if (ps->tim[i]) { ps 4255 net/mac80211/tx.c if (ps->tim[i]) { ps 4265 net/mac80211/tx.c memcpy(pos, ps->tim + n1, n2 - n1 + 1); ps 4275 net/mac80211/tx.c struct ps_data *ps, struct sk_buff *skb, ps 4288 net/mac80211/tx.c __ieee80211_beacon_add_tim(sdata, ps, skb, is_template); ps 4291 net/mac80211/tx.c 
__ieee80211_beacon_add_tim(sdata, ps, skb, is_template); ps 4516 net/mac80211/tx.c ieee80211_beacon_add_tim(sdata, &ap->ps, skb, ps 4588 net/mac80211/tx.c ieee80211_beacon_add_tim(sdata, &ifmsh->ps, skb, is_template); ps 4900 net/mac80211/tx.c struct ps_data *ps; ps 4919 net/mac80211/tx.c ps = &sdata->u.ap.ps; ps 4921 net/mac80211/tx.c ps = &sdata->u.mesh.ps; ps 4926 net/mac80211/tx.c if (ps->dtim_count != 0 || !ps->dtim_bc_mc) ps 4930 net/mac80211/tx.c skb = skb_dequeue(&ps->bc_buf); ps 4935 net/mac80211/tx.c if (!skb_queue_empty(&ps->bc_buf) && skb->len >= 2) { ps 245 net/mac80211/util.c struct ps_data *ps = NULL; ps 254 net/mac80211/util.c ps = &sdata->bss->ps; ps 289 net/mac80211/util.c (ps && atomic_read(&ps->num_sta_ps)) || ac != vif->txq->ac) ps 3777 net/mac80211/util.c struct ps_data *ps; ps 3788 net/mac80211/util.c ps = &sdata->bss->ps; ps 3790 net/mac80211/util.c ps = &sdata->u.mesh.ps; ps 3808 net/mac80211/util.c ps->dtim_count = dtim_count; ps 1294 net/wireless/core.c wdev->ps = true; ps 1296 net/wireless/core.c wdev->ps = false; ps 1371 net/wireless/core.c rdev_set_power_mgmt(rdev, dev, wdev->ps, ps 1374 net/wireless/core.c wdev->ps = false; ps 10727 net/wireless/nl80211.c if (state == wdev->ps) ps 10732 net/wireless/nl80211.c wdev->ps = state; ps 10762 net/wireless/nl80211.c if (wdev->ps) ps 1102 net/wireless/wext-compat.c bool ps = wdev->ps; ps 1113 net/wireless/wext-compat.c ps = false; ps 1119 net/wireless/wext-compat.c ps = true; ps 1132 net/wireless/wext-compat.c err = rdev_set_power_mgmt(rdev, dev, ps, timeout); ps 1136 net/wireless/wext-compat.c wdev->ps = ps; ps 1149 net/wireless/wext-compat.c wrq->disabled = !wdev->ps; ps 452 sound/soc/codecs/ak4642.c int ps, fs; ps 454 sound/soc/codecs/ak4642.c for (ps = 0; ps < ARRAY_SIZE(ps_list); ps++) { ps 456 sound/soc/codecs/ak4642.c if (frequency == ps_list[ps] * fs_list[fs]) { ps 458 sound/soc/codecs/ak4642.c PSs(ps) | FSs(fs)); ps 204 sound/soc/fsl/fsl_asrc.c u32 ps; ps 207 sound/soc/fsl/fsl_asrc.c for (ps = 0; div > 8; ps++) ps 210 sound/soc/fsl/fsl_asrc.c return ((div - 1) << ASRCDRi_AxCPi_WIDTH) | ps; ps 98 sound/soc/sof/xtensa/core.c xoops->exccause, xoops->excvaddr, xoops->ps, xoops->sar); ps 46 tools/perf/pmu-events/json.c unsigned ps = sysconf(_SC_PAGESIZE); ps 64 tools/perf/pmu-events/json.c (st.st_size + ps - 1) & ~(ps - 1), ps 75 tools/perf/pmu-events/json.c unsigned ps = sysconf(_SC_PAGESIZE); ps 76 tools/perf/pmu-events/json.c munmap(map, roundup(size, ps)); ps 1370 tools/perf/util/evsel.c struct perf_stat_evsel *ps = leader->stats; ps 1373 tools/perf/util/evsel.c u64 *data = ps->group_data; ps 1386 tools/perf/util/evsel.c ps->group_data = data; ps 51 tools/perf/util/stat-display.c struct perf_stat_evsel *ps; ps 56 tools/perf/util/stat-display.c ps = evsel->stats; ps 57 tools/perf/util/stat-display.c print_noise_pct(config, stddev_stats(&ps->res_stats[0]), avg); ps 787 tools/perf/util/stat-display.c struct perf_stat_evsel *ps = counter->stats; ps 789 tools/perf/util/stat-display.c cd->avg += avg_stats(&ps->res_stats[0]); ps 790 tools/perf/util/stat-display.c cd->avg_enabled += avg_stats(&ps->res_stats[1]); ps 791 tools/perf/util/stat-display.c cd->avg_running += avg_stats(&ps->res_stats[2]); ps 81 tools/perf/util/stat.c struct perf_stat_evsel *ps = evsel->stats; ps 83 tools/perf/util/stat.c return ps->id == id; ps 105 tools/perf/util/stat.c struct perf_stat_evsel *ps = evsel->stats; ps 112 tools/perf/util/stat.c ps->id = i; ps 121 tools/perf/util/stat.c struct perf_stat_evsel *ps = evsel->stats; ps 124 
tools/perf/util/stat.c init_stats(&ps->res_stats[i]); ps 140 tools/perf/util/stat.c struct perf_stat_evsel *ps = evsel->stats; ps 142 tools/perf/util/stat.c if (ps) ps 143 tools/perf/util/stat.c zfree(&ps->group_data); ps 357 tools/perf/util/stat.c struct perf_stat_evsel *ps = counter->stats; ps 371 tools/perf/util/stat.c init_stats(ps->res_stats); ps 388 tools/perf/util/stat.c update_stats(&ps->res_stats[i], count[i]); ps 23 tools/testing/selftests/vm/mlock-random-test.c #define PAGE_ALIGN(size, ps) (((size) + ((ps) - 1)) & ~((ps) - 1)) ps 95 tools/testing/selftests/vm/thuge-gen.c void show(unsigned long ps) ps 98 tools/testing/selftests/vm/thuge-gen.c if (ps == getpagesize()) ps 100 tools/testing/selftests/vm/thuge-gen.c printf("%luMB: ", ps >> 20); ps 104 tools/testing/selftests/vm/thuge-gen.c ps >> 10); ps 135 tools/testing/selftests/vm/thuge-gen.c unsigned long read_free(unsigned long ps) ps 137 tools/testing/selftests/vm/thuge-gen.c return read_sysfs(ps != getpagesize(), ps 139 tools/testing/selftests/vm/thuge-gen.c ps >> 10); ps 236 tools/testing/selftests/vm/thuge-gen.c unsigned long ps = page_sizes[i]; ps 237 tools/testing/selftests/vm/thuge-gen.c int arg = ilog2(ps) << MAP_HUGE_SHIFT; ps 238 tools/testing/selftests/vm/thuge-gen.c printf("Testing %luMB mmap with shift %x\n", ps >> 20, arg); ps 239 tools/testing/selftests/vm/thuge-gen.c test_mmap(ps, MAP_HUGETLB | arg); ps 248 tools/testing/selftests/vm/thuge-gen.c unsigned long ps = page_sizes[i]; ps 249 tools/testing/selftests/vm/thuge-gen.c int arg = ilog2(ps) << SHM_HUGE_SHIFT; ps 250 tools/testing/selftests/vm/thuge-gen.c printf("Testing %luMB shmget with shift %x\n", ps >> 20, arg); ps 251 tools/testing/selftests/vm/thuge-gen.c test_shmget(ps, SHM_HUGETLB | arg);