pt 177 arch/alpha/include/asm/core_lca.h unsigned long pt[31]; /* PAL temps */
pt 112 arch/alpha/include/asm/elf.h extern void dump_elf_thread(elf_greg_t *dest, struct pt_regs *pt,
pt 448 arch/alpha/kernel/core_lca.c el.l->pt[0], el.l->exc_addr, el.l->dc_stat);
pt 289 arch/alpha/kernel/process.c dump_elf_thread(elf_greg_t *dest, struct pt_regs *pt, struct thread_info *ti)
pt 292 arch/alpha/kernel/process.c struct switch_stack * sw = ((struct switch_stack *) pt) - 1;
pt 294 arch/alpha/kernel/process.c dest[ 0] = pt->r0;
pt 295 arch/alpha/kernel/process.c dest[ 1] = pt->r1;
pt 296 arch/alpha/kernel/process.c dest[ 2] = pt->r2;
pt 297 arch/alpha/kernel/process.c dest[ 3] = pt->r3;
pt 298 arch/alpha/kernel/process.c dest[ 4] = pt->r4;
pt 299 arch/alpha/kernel/process.c dest[ 5] = pt->r5;
pt 300 arch/alpha/kernel/process.c dest[ 6] = pt->r6;
pt 301 arch/alpha/kernel/process.c dest[ 7] = pt->r7;
pt 302 arch/alpha/kernel/process.c dest[ 8] = pt->r8;
pt 310 arch/alpha/kernel/process.c dest[16] = pt->r16;
pt 311 arch/alpha/kernel/process.c dest[17] = pt->r17;
pt 312 arch/alpha/kernel/process.c dest[18] = pt->r18;
pt 313 arch/alpha/kernel/process.c dest[19] = pt->r19;
pt 314 arch/alpha/kernel/process.c dest[20] = pt->r20;
pt 315 arch/alpha/kernel/process.c dest[21] = pt->r21;
pt 316 arch/alpha/kernel/process.c dest[22] = pt->r22;
pt 317 arch/alpha/kernel/process.c dest[23] = pt->r23;
pt 318 arch/alpha/kernel/process.c dest[24] = pt->r24;
pt 319 arch/alpha/kernel/process.c dest[25] = pt->r25;
pt 320 arch/alpha/kernel/process.c dest[26] = pt->r26;
pt 321 arch/alpha/kernel/process.c dest[27] = pt->r27;
pt 322 arch/alpha/kernel/process.c dest[28] = pt->r28;
pt 323 arch/alpha/kernel/process.c dest[29] = pt->gp;
pt 325 arch/alpha/kernel/process.c dest[31] = pt->pc;
pt 92 arch/arm64/include/asm/assembler.h dmb \opt
pt 131 arch/ia64/include/asm/ptrace.h extern unsigned long ia64_get_scratch_nat_bits (struct pt_regs *pt, unsigned long scratch_unat);
pt 133 arch/ia64/include/asm/ptrace.h extern unsigned long ia64_put_scratch_nat_bits (struct pt_regs *pt, unsigned long nat);
pt 135 arch/ia64/include/asm/ptrace.h extern void ia64_increment_ip (struct pt_regs *pt);
pt 136 arch/ia64/include/asm/ptrace.h extern void ia64_decrement_ip (struct pt_regs *pt);
pt 66 arch/ia64/include/asm/unwind.h unsigned long pt; /* struct pt_regs location */
pt 79 arch/ia64/kernel/crash.c machine_crash_shutdown(struct pt_regs *pt)
pt 166 arch/ia64/kernel/process.c if (fsys_mode(current, &scr->pt)) {
pt 171 arch/ia64/kernel/process.c if (!ia64_psr(&scr->pt)->lp)
pt 172 arch/ia64/kernel/process.c ia64_psr(&scr->pt)->lp = 1;
pt 193 arch/ia64/kernel/process.c tracehook_notify_resume(&scr->pt);
pt 449 arch/ia64/kernel/process.c struct pt_regs *pt;
pt 459 arch/ia64/kernel/process.c pt = (struct pt_regs *) (sp + 16);
pt 461 arch/ia64/kernel/process.c urbs_end = ia64_get_user_rbs_end(task, pt, &cfm);
pt 463 arch/ia64/kernel/process.c if (ia64_sync_user_rbs(task, info->sw, pt->ar_bspstore, urbs_end) < 0)
pt 494 arch/ia64/kernel/process.c dst[42] = ip + ia64_psr(pt)->ri;
pt 496 arch/ia64/kernel/process.c dst[44] = pt->cr_ipsr & IA64_PSR_UM;
pt 504 arch/ia64/kernel/process.c dst[47] = pt->ar_bspstore;
pt 509 arch/ia64/kernel/process.c dst[52] = pt->ar_pfs; /* UNW_AR_PFS is == to pt->cr_ifs for interrupt frames */
pt 550 arch/ia64/kernel/process.c ia64_elf_core_copy_regs (struct pt_regs *pt, elf_gregset_t dst)
pt 556 arch/ia64/kernel/process.c dump_fpu (struct pt_regs *pt, elf_fpregset_t dst)
pt 67 arch/ia64/kernel/ptrace.c in_syscall (struct pt_regs *pt)
pt 69 arch/ia64/kernel/ptrace.c return (long) pt->cr_ifs >= 0;
pt 77 arch/ia64/kernel/ptrace.c ia64_get_scratch_nat_bits (struct pt_regs *pt, unsigned long scratch_unat)
pt 81 arch/ia64/kernel/ptrace.c unsigned long bit = ia64_unat_pos(&pt->r##first); \
pt 116 arch/ia64/kernel/ptrace.c ia64_put_scratch_nat_bits (struct pt_regs *pt, unsigned long nat)
pt 120 arch/ia64/kernel/ptrace.c unsigned long bit = ia64_unat_pos(&pt->r##first); \
pt 260 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 262 arch/ia64/kernel/ptrace.c pt = task_pt_regs(task);
pt 264 arch/ia64/kernel/ptrace.c ubspstore = (unsigned long *) pt->ar_bspstore;
pt 286 arch/ia64/kernel/ptrace.c urnat = (pt->ar_rnat & umask);
pt 319 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 322 arch/ia64/kernel/ptrace.c pt = task_pt_regs(task);
pt 324 arch/ia64/kernel/ptrace.c ubspstore = (unsigned long *) pt->ar_bspstore;
pt 327 arch/ia64/kernel/ptrace.c if (in_syscall(pt)) {
pt 332 arch/ia64/kernel/ptrace.c cfm = pt->cr_ifs;
pt 360 arch/ia64/kernel/ptrace.c pt->ar_rnat = (pt->ar_rnat & ~umask) | (urnat & umask);
pt 511 arch/ia64/kernel/ptrace.c ia64_get_user_rbs_end (struct task_struct *child, struct pt_regs *pt,
pt 514 arch/ia64/kernel/ptrace.c unsigned long *krbs, *bspstore, cfm = pt->cr_ifs;
pt 518 arch/ia64/kernel/ptrace.c bspstore = (unsigned long *) pt->ar_bspstore;
pt 519 arch/ia64/kernel/ptrace.c ndirty = ia64_rse_num_regs(krbs, krbs + (pt->loadrs >> 19));
pt 521 arch/ia64/kernel/ptrace.c if (in_syscall(pt))
pt 584 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 590 arch/ia64/kernel/ptrace.c pt = task_pt_regs(info->task);
pt 591 arch/ia64/kernel/ptrace.c urbs_end = ia64_get_user_rbs_end(info->task, pt, NULL);
pt 593 arch/ia64/kernel/ptrace.c fn(info->task, info->sw, pt->ar_bspstore, urbs_end);
pt 729 arch/ia64/kernel/ptrace.c convert_to_non_syscall (struct task_struct *child, struct pt_regs *pt,
pt 771 arch/ia64/kernel/ptrace.c pt->cr_ifs = (1UL << 63) | cfm;
pt 777 arch/ia64/kernel/ptrace.c pt->r2 = 0;
pt 778 arch/ia64/kernel/ptrace.c pt->r3 = 0;
pt 779 arch/ia64/kernel/ptrace.c pt->r14 = 0;
pt 780 arch/ia64/kernel/ptrace.c memset(&pt->r16, 0, 16*8); /* clear r16-r31 */
pt 781 arch/ia64/kernel/ptrace.c memset(&pt->f6, 0, 6*16); /* clear f6-f11 */
pt 782 arch/ia64/kernel/ptrace.c pt->b7 = 0;
pt 783 arch/ia64/kernel/ptrace.c pt->ar_ccv = 0;
pt 784 arch/ia64/kernel/ptrace.c pt->ar_csd = 0;
pt 785 arch/ia64/kernel/ptrace.c pt->ar_ssd = 0;
pt 789 arch/ia64/kernel/ptrace.c access_nat_bits (struct task_struct *child, struct pt_regs *pt,
pt 798 arch/ia64/kernel/ptrace.c scratch_unat = ia64_put_scratch_nat_bits(pt, nat_bits);
pt 813 arch/ia64/kernel/ptrace.c nat_bits = ia64_get_scratch_nat_bits(pt, scratch_unat);
pt 834 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 842 arch/ia64/kernel/ptrace.c pt = task_pt_regs(child);
pt 865 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->cr_iip, &ppr->cr_iip);
pt 870 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->ar_pfs, &ppr->ar[PT_AUR_PFS]);
pt 871 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->ar_rsc, &ppr->ar[PT_AUR_RSC]);
pt 872 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->ar_bspstore, &ppr->ar[PT_AUR_BSPSTORE]);
pt 873 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->ar_unat, &ppr->ar[PT_AUR_UNAT]);
pt 874 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->ar_ccv, &ppr->ar[PT_AUR_CCV]);
pt 875 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->ar_fpsr, &ppr->ar[PT_AUR_FPSR]);
pt 885 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[1], &pt->r1, sizeof(long));
pt 886 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[2], &pt->r2, sizeof(long) *2);
pt 898 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[8], &pt->r8, sizeof(long) * 4);
pt 902 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[12], &pt->r12, sizeof(long) * 2);
pt 903 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[14], &pt->r14, sizeof(long));
pt 904 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[15], &pt->r15, sizeof(long));
pt 908 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->gr[16], &pt->r16, sizeof(long) * 16);
pt 912 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->b0, &ppr->br[0]);
pt 924 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->b6, &ppr->br[6]);
pt 925 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->b7, &ppr->br[7]);
pt 937 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->fr[6], &pt->f6,
pt 961 arch/ia64/kernel/ptrace.c retval |= __put_user(pt->pr, &ppr->pr);
pt 978 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 987 arch/ia64/kernel/ptrace.c pt = task_pt_regs(child);
pt 1001 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->cr_iip, &ppr->cr_iip);
pt 1006 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->ar_pfs, &ppr->ar[PT_AUR_PFS]);
pt 1008 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->ar_bspstore, &ppr->ar[PT_AUR_BSPSTORE]);
pt 1009 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->ar_unat, &ppr->ar[PT_AUR_UNAT]);
pt 1010 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->ar_ccv, &ppr->ar[PT_AUR_CCV]);
pt 1011 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->ar_fpsr, &ppr->ar[PT_AUR_FPSR]);
pt 1021 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r1, &ppr->gr[1], sizeof(long));
pt 1022 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r2, &ppr->gr[2], sizeof(long) * 2);
pt 1035 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r8, &ppr->gr[8], sizeof(long) * 4);
pt 1039 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r12, &ppr->gr[12], sizeof(long) * 2);
pt 1040 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r14, &ppr->gr[14], sizeof(long));
pt 1041 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r15, &ppr->gr[15], sizeof(long));
pt 1045 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->r16, &ppr->gr[16], sizeof(long) * 16);
pt 1049 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->b0, &ppr->br[0]);
pt 1060 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->b6, &ppr->br[6]);
pt 1061 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->b7, &ppr->br[7]);
pt 1073 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&pt->f6, &ppr->fr[6],
pt 1098 arch/ia64/kernel/ptrace.c retval |= __get_user(pt->pr, &ppr->pr);
pt 1281 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 1286 arch/ia64/kernel/ptrace.c pt = task_pt_regs(target);
pt 1289 arch/ia64/kernel/ptrace.c ptr = &pt->r1;
pt 1293 arch/ia64/kernel/ptrace.c ptr = (void *)&pt->r2 + (addr - ELF_GR_OFFSET(2));
pt 1306 arch/ia64/kernel/ptrace.c ptr = (void *)&pt->r8 + addr - ELF_GR_OFFSET(8);
pt 1310 arch/ia64/kernel/ptrace.c ptr = (void *)&pt->r12 + addr - ELF_GR_OFFSET(12);
pt 1313 arch/ia64/kernel/ptrace.c ptr = &pt->r14;
pt 1316 arch/ia64/kernel/ptrace.c ptr = &pt->r15;
pt 1329 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 1332 arch/ia64/kernel/ptrace.c pt = task_pt_regs(target);
pt 1335 arch/ia64/kernel/ptrace.c ptr = &pt->b0;
pt 1341 arch/ia64/kernel/ptrace.c ptr = &pt->b6;
pt 1344 arch/ia64/kernel/ptrace.c ptr = &pt->b7;
pt 1357 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 1361 arch/ia64/kernel/ptrace.c pt = task_pt_regs(target);
pt 1367 arch/ia64/kernel/ptrace.c pt->ar_rsc = *data | (3 << 2);
pt 1369 arch/ia64/kernel/ptrace.c *data = pt->ar_rsc;
pt 1396 arch/ia64/kernel/ptrace.c urbs_end = ia64_get_user_rbs_end(target, pt, &cfm);
pt 1399 arch/ia64/kernel/ptrace.c if (in_syscall(pt))
pt 1401 arch/ia64/kernel/ptrace.c pt,
pt 1407 arch/ia64/kernel/ptrace.c pt->loadrs = 0;
pt 1408 arch/ia64/kernel/ptrace.c pt->ar_bspstore = *data;
pt 1414 arch/ia64/kernel/ptrace.c ptr = &pt->ar_bspstore;
pt 1417 arch/ia64/kernel/ptrace.c ptr = &pt->ar_rnat;
pt 1420 arch/ia64/kernel/ptrace.c ptr = &pt->ar_ccv;
pt 1423 arch/ia64/kernel/ptrace.c ptr = &pt->ar_unat;
pt 1426 arch/ia64/kernel/ptrace.c ptr = &pt->ar_fpsr;
pt 1429 arch/ia64/kernel/ptrace.c ptr = &pt->ar_pfs;
pt 1438 arch/ia64/kernel/ptrace.c ptr = &pt->ar_csd;
pt 1441 arch/ia64/kernel/ptrace.c ptr = &pt->ar_ssd;
pt 1446 arch/ia64/kernel/ptrace.c ptr = &pt->cr_iip;
pt 1449 arch/ia64/kernel/ptrace.c urbs_end = ia64_get_user_rbs_end(target, pt, &cfm);
pt 1452 arch/ia64/kernel/ptrace.c if (in_syscall(pt))
pt 1454 arch/ia64/kernel/ptrace.c pt,
pt 1456 arch/ia64/kernel/ptrace.c pt->cr_ifs = ((pt->cr_ifs & ~PFM_MASK)
pt 1468 arch/ia64/kernel/ptrace.c pt->cr_ipsr = ((tmp & IPSR_MASK)
pt 1469 arch/ia64/kernel/ptrace.c | (pt->cr_ipsr & ~IPSR_MASK));
pt 1471 arch/ia64/kernel/ptrace.c *data = (pt->cr_ipsr & IPSR_MASK);
pt 1475 arch/ia64/kernel/ptrace.c return access_nat_bits(target, pt, info,
pt 1478 arch/ia64/kernel/ptrace.c ptr = &pt->pr;
pt 1504 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 1555 arch/ia64/kernel/ptrace.c pt = task_pt_regs(dst->target);
pt 1557 arch/ia64/kernel/ptrace.c &dst->u.get.kbuf, &dst->u.get.ubuf, &pt->r16,
pt 1604 arch/ia64/kernel/ptrace.c struct pt_regs *pt;
pt 1643 arch/ia64/kernel/ptrace.c pt = task_pt_regs(dst->target);
pt 1645 arch/ia64/kernel/ptrace.c &dst->u.set.kbuf, &dst->u.set.ubuf, &pt->r16,
pt 2149 arch/ia64/kernel/ptrace.c struct pt_regs *pt = args->regs;
pt 2156 arch/ia64/kernel/ptrace.c cfm = pt->cr_ifs;
pt 2158 arch/ia64/kernel/ptrace.c ndirty = ia64_rse_num_regs(krbs, krbs + (pt->loadrs >> 19));
pt 2161 arch/ia64/kernel/ptrace.c if (in_syscall(pt))
pt 5 arch/ia64/kernel/sigframe.h struct pt_regs pt;
pt 59 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.ar_unat, &sc->sc_ar_unat);
pt 60 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.ar_fpsr, &sc->sc_ar_fpsr);
pt 61 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.ar_pfs, &sc->sc_ar_pfs);
pt 62 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.pr, &sc->sc_pr); /* predicates */
pt 63 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.b0, &sc->sc_br[0]); /* b0 (rp) */
pt 64 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.b6, &sc->sc_br[6]); /* b6 */
pt 65 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.r1, &sc->sc_gr[1], 8); /* r1 */
pt 66 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.r8, &sc->sc_gr[8], 4*8); /* r8-r11 */
pt 67 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.r12, &sc->sc_gr[12], 2*8); /* r12-r13 */
pt 68 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.r15, &sc->sc_gr[15], 8); /* r15 */
pt 70 arch/ia64/kernel/signal.c scr->pt.cr_ifs = cfm | (1UL << 63);
pt 71 arch/ia64/kernel/signal.c scr->pt.ar_rsc = rsc | (3 << 2); /* force PL3 */
pt 74 arch/ia64/kernel/signal.c scr->pt.cr_iip = ip & ~0x3UL;
pt 75 arch/ia64/kernel/signal.c ia64_psr(&scr->pt)->ri = ip & 0x3;
pt 76 arch/ia64/kernel/signal.c scr->pt.cr_ipsr = (scr->pt.cr_ipsr & ~IA64_PSR_UM) | (um & IA64_PSR_UM);
pt 78 arch/ia64/kernel/signal.c scr->scratch_unat = ia64_put_scratch_nat_bits(&scr->pt, nat);
pt 82 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.ar_ccv, &sc->sc_ar_ccv); /* ar.ccv */
pt 83 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.b7, &sc->sc_br[7]); /* b7 */
pt 84 arch/ia64/kernel/signal.c err |= __get_user(scr->pt.r14, &sc->sc_gr[14]); /* r14 */
pt 85 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.ar_csd, &sc->sc_ar25, 2*8); /* ar.csd & ar.ssd */
pt 86 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.r2, &sc->sc_gr[2], 2*8); /* r2-r3 */
pt 87 arch/ia64/kernel/signal.c err |= __copy_from_user(&scr->pt.r16, &sc->sc_gr[16], 16*8); /* r16-r31 */
pt 91 arch/ia64/kernel/signal.c struct ia64_psr *psr = ia64_psr(&scr->pt);
pt 116 arch/ia64/kernel/signal.c sc = &((struct sigframe __user *) (scr->pt.r12 + 16))->sc;
pt 148 arch/ia64/kernel/signal.c current->comm, current->pid, scr->pt.r12, scr->pt.cr_iip);
pt 171 arch/ia64/kernel/signal.c ifs = scr->pt.cr_ifs;
pt 185 arch/ia64/kernel/signal.c nat = ia64_get_scratch_nat_bits(&scr->pt, scr->scratch_unat);
pt 191 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.cr_ipsr & IA64_PSR_UM, &sc->sc_um);
pt 192 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.ar_rsc, &sc->sc_ar_rsc);
pt 193 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.ar_unat, &sc->sc_ar_unat); /* ar.unat */
pt 194 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.ar_fpsr, &sc->sc_ar_fpsr); /* ar.fpsr */
pt 195 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.ar_pfs, &sc->sc_ar_pfs);
pt 196 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.pr, &sc->sc_pr); /* predicates */
pt 197 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.b0, &sc->sc_br[0]); /* b0 (rp) */
pt 198 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.b6, &sc->sc_br[6]); /* b6 */
pt 199 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_gr[1], &scr->pt.r1, 8); /* r1 */
pt 200 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_gr[8], &scr->pt.r8, 4*8); /* r8-r11 */
pt 201 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_gr[12], &scr->pt.r12, 2*8); /* r12-r13 */
pt 202 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_gr[15], &scr->pt.r15, 8); /* r15 */
pt 203 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.cr_iip + ia64_psr(&scr->pt)->ri, &sc->sc_ip);
pt 207 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.ar_ccv, &sc->sc_ar_ccv); /* ar.ccv */
pt 208 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.b7, &sc->sc_br[7]); /* b7 */
pt 209 arch/ia64/kernel/signal.c err |= __put_user(scr->pt.r14, &sc->sc_gr[14]); /* r14 */
pt 210 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_ar25, &scr->pt.ar_csd, 2*8); /* ar.csd & ar.ssd */
pt 211 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_gr[2], &scr->pt.r2, 2*8); /* r2-r3 */
pt 212 arch/ia64/kernel/signal.c err |= __copy_to_user(&sc->sc_gr[16], &scr->pt.r16, 16*8); /* r16-r31 */
pt 234 arch/ia64/kernel/signal.c new_sp = scr->pt.r12;
pt 247 arch/ia64/kernel/signal.c if (!rbs_on_sig_stack(scr->pt.ar_bspstore))
pt 281 arch/ia64/kernel/signal.c err |= __save_altstack(&frame->sc.sc_stack, scr->pt.r12);
pt 289 arch/ia64/kernel/signal.c scr->pt.r12 = (unsigned long) frame - 16; /* new stack pointer */
pt 290 arch/ia64/kernel/signal.c scr->pt.ar_fpsr = FPSR_DEFAULT; /* reset fpsr for signal handler */
pt 291 arch/ia64/kernel/signal.c scr->pt.cr_iip = tramp_addr;
pt 292 arch/ia64/kernel/signal.c ia64_psr(&scr->pt)->ri = 0; /* start executing in first slot */
pt 293 arch/ia64/kernel/signal.c ia64_psr(&scr->pt)->be = 0; /* force little-endian byte-order */
pt 302 arch/ia64/kernel/signal.c scr->pt.cr_ifs = (1UL << 63);
pt 312 arch/ia64/kernel/signal.c current->comm, current->pid, ksig->sig, scr->pt.r12, frame->sc.sc_ip, frame->handler);
pt 336 arch/ia64/kernel/signal.c long errno = scr->pt.r8;
pt 352 arch/ia64/kernel/signal.c if ((long) scr->pt.r10 != -1)
pt 368 arch/ia64/kernel/signal.c scr->pt.r8 = EINTR;
pt 373 arch/ia64/kernel/signal.c scr->pt.r8 = EINTR;
pt 379 arch/ia64/kernel/signal.c ia64_decrement_ip(&scr->pt);
pt 403 arch/ia64/kernel/signal.c ia64_decrement_ip(&scr->pt);
pt 405 arch/ia64/kernel/signal.c scr->pt.r15 = __NR_restart_syscall;
pt 269 arch/ia64/kernel/unwind.c if (!info->pt) {
pt 273 arch/ia64/kernel/unwind.c info->pt = (unsigned long) ((struct pt_regs *) info->psp - 1);
pt 275 arch/ia64/kernel/unwind.c info->pt = info->sp - 16;
pt 277 arch/ia64/kernel/unwind.c UNW_DPRINT(3, "unwind.%s: sp 0x%lx pt 0x%lx\n", __func__, info->sp, info->pt);
pt 278 arch/ia64/kernel/unwind.c return (struct pt_regs *) info->pt;
pt 288 arch/ia64/kernel/unwind.c struct pt_regs *pt;
pt 361 arch/ia64/kernel/unwind.c pt = get_scratch_regs(info);
pt 362 arch/ia64/kernel/unwind.c addr = (unsigned long *) ((unsigned long)pt + pt_regs_off(regnum));
pt 413 arch/ia64/kernel/unwind.c struct pt_regs *pt;
pt 417 arch/ia64/kernel/unwind.c case 0: pt = get_scratch_regs(info); addr = &pt->b0; break;
pt 418 arch/ia64/kernel/unwind.c case 6: pt = get_scratch_regs(info); addr = &pt->b6; break;
pt 419 arch/ia64/kernel/unwind.c case 7: pt = get_scratch_regs(info); addr = &pt->b7; break;
pt 449 arch/ia64/kernel/unwind.c struct pt_regs *pt;
pt 463 arch/ia64/kernel/unwind.c pt = get_scratch_regs(info);
pt 464 arch/ia64/kernel/unwind.c addr = &pt->f6 + (regnum - 6);
pt 498 arch/ia64/kernel/unwind.c struct pt_regs *pt;
pt 554 arch/ia64/kernel/unwind.c pt = get_scratch_regs(info);
pt 555 arch/ia64/kernel/unwind.c addr = &pt->ar_rsc;
pt 559 arch/ia64/kernel/unwind.c pt = get_scratch_regs(info);
pt 560 arch/ia64/kernel/unwind.c addr = &pt->ar_ccv;
pt 564 arch/ia64/kernel/unwind.c pt = get_scratch_regs(info);
pt 565 arch/ia64/kernel/unwind.c addr = &pt->ar_csd;
pt 569 arch/ia64/kernel/unwind.c pt = get_scratch_regs(info);
pt 570 arch/ia64/kernel/unwind.c addr = &pt->ar_ssd;
pt 1759 arch/ia64/kernel/unwind.c if (state->pt) {
pt 1925 arch/ia64/kernel/unwind.c info->pt = info->sp + 16;
pt 1929 arch/ia64/kernel/unwind.c (unsigned long *) (info->pt + offsetof(struct pt_regs, ar_pfs));
pt 1930 arch/ia64/kernel/unwind.c UNW_DPRINT(3, "unwind.%s: interrupt_frame pt 0x%lx\n", __func__, info->pt);
pt 59 arch/mips/kernel/spinlock_test.c struct spin_multi_per_thread *pt = data;
pt 60 arch/mips/kernel/spinlock_test.c struct spin_multi_state *s = pt->state;
pt 70 arch/mips/kernel/spinlock_test.c pt->start = ktime_get();
pt 43 arch/openrisc/include/asm/elf.h extern void dump_elf_thread(elf_greg_t *dest, struct pt_regs *pt);
pt 277 arch/parisc/include/asm/elf.h #define ELF_CORE_COPY_REGS(dst, pt) \
pt 280 arch/parisc/include/asm/elf.h for (i = 0; i < 32; i++) dst[i] = pt->gr[i]; \
pt 281 arch/parisc/include/asm/elf.h for (i = 0; i < 8; i++) dst[32 + i] = pt->sr[i]; \
pt 283 arch/parisc/include/asm/elf.h dst[40] = pt->iaoq[0]; dst[41] = pt->iaoq[1]; \
pt 284 arch/parisc/include/asm/elf.h dst[42] = pt->iasq[0]; dst[43] = pt->iasq[1]; \
pt 285 arch/parisc/include/asm/elf.h dst[44] = pt->sar; dst[45] = pt->iir; \
pt 286 arch/parisc/include/asm/elf.h dst[46] = pt->isr; dst[47] = pt->ior; \
pt 66 arch/sparc/include/asm/backoff.h brnz,pt tmp, 88b; \
pt 72 arch/sparc/include/asm/backoff.h ba,pt %xcc, label; \
pt 63 arch/sparc/include/asm/head_64.h bne,pt %xcc, 99f; \
pt 67 arch/sparc/include/asm/head_64.h bgeu,pt %xcc, label; \
pt 74 arch/sparc/include/asm/head_64.h bne,pt %xcc, 99f; \
pt 78 arch/sparc/include/asm/head_64.h bgeu,pt %xcc, label; \
pt 174 arch/sparc/include/asm/tsb.h bne,pt %xcc, 697f; \
pt 189 arch/sparc/include/asm/tsb.h ba,pt %xcc, 699f; \
pt 219 arch/sparc/include/asm/tsb.h be,pt %xcc, 700f; \
pt 225 arch/sparc/include/asm/tsb.h brlz,pt REG1, PTE_LABEL; \
pt 248 arch/sparc/include/asm/tsb.h be,pt %xcc, 700f; \
pt 253 arch/sparc/include/asm/tsb.h brlz,pt REG1, PTE_LABEL; \
pt 311 arch/sparc/include/asm/tsb.h bgu,pt %xcc, 98f; \
pt 313 arch/sparc/include/asm/tsb.h bgeu,pt %xcc, 98f; \
pt 316 arch/sparc/include/asm/tsb.h ba,pt %xcc, 99f; \
pt 318 arch/sparc/include/asm/tsb.h 98: ba,pt %xcc, 97b; \
pt 354 arch/sparc/include/asm/tsb.h be,a,pt %xcc, OK_LABEL; \
pt 376 arch/sparc/include/asm/tsb.h be,a,pt %xcc, OK_LABEL; \
pt 27 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap; \
pt 31 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 37 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap; \
pt 41 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 46 arch/sparc/include/asm/ttable.h ba,pt %xcc, do_fptrap; \
pt 50 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 55 arch/sparc/include/asm/ttable.h ba,pt %xcc, routine; \
pt 60 arch/sparc/include/asm/ttable.h ba,pt %xcc, routine; \
pt 66 arch/sparc/include/asm/ttable.h ba,pt %xcc, etraptl1; \
pt 70 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 76 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap; \
pt 81 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 86 arch/sparc/include/asm/ttable.h ba,pt %xcc, etraptl1; \
pt 91 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 98 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_syscall; \
pt 101 arch/sparc/include/asm/ttable.h ba,pt %xcc, routine; \
pt 106 arch/sparc/include/asm/ttable.h ba,pt %xcc, utrap_trap; \
pt 130 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_irq; \
pt 141 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, rtrap_irq; \
pt 149 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_irq; \
pt 154 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, rtrap_irq;
pt 161 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_irq; \
pt 166 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, rtrap_nmi;
pt 175 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap; \
pt 181 arch/sparc/include/asm/ttable.h ba,pt %xcc, rtrap; \
pt 207 arch/sparc/include/asm/ttable.h ba,pt %xcc, sun4v_itsb_miss; \
pt 217 arch/sparc/include/asm/ttable.h ba,pt %xcc, sun4v_dtsb_miss; \
pt 226 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap; \
pt 228 arch/sparc/include/asm/ttable.h ba,pt %xcc, sun4v_mcd_detect_precise; \
pt 291 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_save; \
pt 325 arch/sparc/include/asm/ttable.h b,a,pt %xcc, spill_fixup_dax; \
pt 326 arch/sparc/include/asm/ttable.h b,a,pt %xcc, spill_fixup_mna; \
pt 327 arch/sparc/include/asm/ttable.h b,a,pt %xcc, spill_fixup;
pt 349 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_save; \
pt 353 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, etrap_spill_fixup_64bit; \
pt 354 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, etrap_spill_fixup_64bit; \
pt 355 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, etrap_spill_fixup_64bit;
pt 386 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_save; \
pt 421 arch/sparc/include/asm/ttable.h b,a,pt %xcc, spill_fixup_dax; \
pt 422 arch/sparc/include/asm/ttable.h b,a,pt %xcc, spill_fixup_mna; \
pt 423 arch/sparc/include/asm/ttable.h b,a,pt %xcc, spill_fixup;
pt 448 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_save; \
pt 452 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, etrap_spill_fixup_32bit; \
pt 453 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, etrap_spill_fixup_32bit; \
pt 454 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, etrap_spill_fixup_32bit;
pt 485 arch/sparc/include/asm/ttable.h ba,pt %xcc, etrap_save; \
pt 550 arch/sparc/include/asm/ttable.h ba,pt %xcc, kern_rtt_restore; \
pt 583 arch/sparc/include/asm/ttable.h b,a,pt %xcc, fill_fixup_dax; \
pt 584 arch/sparc/include/asm/ttable.h b,a,pt %xcc, fill_fixup_mna; \
pt 585 arch/sparc/include/asm/ttable.h b,a,pt %xcc, fill_fixup;
pt 605 arch/sparc/include/asm/ttable.h ba,pt %xcc, user_rtt_pre_restore; \
pt 609 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, user_rtt_fill_fixup_dax; \
pt 610 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, user_rtt_fill_fixup_mna; \
pt 611 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, user_rtt_fill_fixup;
pt 643 arch/sparc/include/asm/ttable.h b,a,pt %xcc, fill_fixup_dax; \
pt 644 arch/sparc/include/asm/ttable.h b,a,pt %xcc, fill_fixup_mna; \
pt 645 arch/sparc/include/asm/ttable.h b,a,pt %xcc, fill_fixup;
pt 668 arch/sparc/include/asm/ttable.h ba,pt %xcc, user_rtt_pre_restore; \
pt 672 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, user_rtt_fill_fixup_dax; \
pt 673 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, user_rtt_fill_fixup_mna; \
pt 674 arch/sparc/include/asm/ttable.h ba,a,pt %xcc, user_rtt_fill_fixup;
pt 18 arch/sparc/include/asm/visasm.h be,pt %icc, 297f; \
pt 40 arch/sparc/include/asm/visasm.h be,pt %icc, 297f; \
pt 42 arch/sparc/include/asm/visasm.h ba,a,pt %xcc, fail_label; \
pt 25 arch/sparc/vdso/vdso2c.h ELF(Phdr) *pt = (ELF(Phdr) *)(raw_addr + GET_BE(&hdr->e_phoff));
pt 29 arch/sparc/vdso/vdso2c.h if (GET_BE(&pt[i].p_type) == PT_LOAD) {
pt 33 arch/sparc/vdso/vdso2c.h if (GET_BE(&pt[i].p_offset) != 0 ||
pt 34 arch/sparc/vdso/vdso2c.h GET_BE(&pt[i].p_vaddr) != 0)
pt 37 arch/sparc/vdso/vdso2c.h if (GET_BE(&pt[i].p_memsz) != GET_BE(&pt[i].p_filesz))
pt 40 arch/sparc/vdso/vdso2c.h load_size = GET_BE(&pt[i].p_memsz);
pt 42 arch/sparc/vdso/vdso2c.h } else if (GET_BE(&pt[i].p_type) == PT_DYNAMIC) {
pt 43 arch/sparc/vdso/vdso2c.h dyn = raw_addr + GET_BE(&pt[i].p_offset);
pt 44 arch/sparc/vdso/vdso2c.h dyn_end = raw_addr + GET_BE(&pt[i].p_offset) +
pt 45 arch/sparc/vdso/vdso2c.h GET_BE(&pt[i].p_memsz);
pt 120 arch/x86/boot/video-mode.c u8 pt, ov;
pt 131 arch/x86/boot/video-mode.c pt = in_idx(crtc, 0x11);
pt 132 arch/x86/boot/video-mode.c pt &= ~0x80; /* Unlock CR0-7 */
pt 133 arch/x86/boot/video-mode.c out_idx(pt, crtc, 0x11);
pt 24 arch/x86/entry/vdso/vdso2c.h ELF(Phdr) *pt = (ELF(Phdr) *)(raw_addr + GET_LE(&hdr->e_phoff));
pt 31 arch/x86/entry/vdso/vdso2c.h if (GET_LE(&pt[i].p_type) == PT_LOAD) {
pt 35 arch/x86/entry/vdso/vdso2c.h if (GET_LE(&pt[i].p_offset) != 0 ||
pt 36 arch/x86/entry/vdso/vdso2c.h GET_LE(&pt[i].p_vaddr) != 0)
pt 39 arch/x86/entry/vdso/vdso2c.h if (GET_LE(&pt[i].p_memsz) != GET_LE(&pt[i].p_filesz))
pt 42 arch/x86/entry/vdso/vdso2c.h load_size = GET_LE(&pt[i].p_memsz);
pt 44 arch/x86/entry/vdso/vdso2c.h } else if (GET_LE(&pt[i].p_type) == PT_DYNAMIC) {
pt 45 arch/x86/entry/vdso/vdso2c.h dyn = raw_addr + GET_LE(&pt[i].p_offset);
pt 46 arch/x86/entry/vdso/vdso2c.h dyn_end = raw_addr + GET_LE(&pt[i].p_offset) +
pt 47 arch/x86/entry/vdso/vdso2c.h GET_LE(&pt[i].p_memsz);
pt 28 arch/x86/events/intel/pt.c static DEFINE_PER_CPU(struct pt, pt_ctx);
pt 102 arch/x86/events/intel/pt.c PMU_FORMAT_ATTR(pt, "config:0" );
pt 431 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 453 arch/x86/events/intel/pt.c if (pt->filters.filter[range].msr_a != filter->msr_a) {
pt 455 arch/x86/events/intel/pt.c pt->filters.filter[range].msr_a = filter->msr_a;
pt 458 arch/x86/events/intel/pt.c if (pt->filters.filter[range].msr_b != filter->msr_b) {
pt 460 arch/x86/events/intel/pt.c pt->filters.filter[range].msr_b = filter->msr_b;
pt 471 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 504 arch/x86/events/intel/pt.c if (READ_ONCE(pt->vmx_on))
pt 505 arch/x86/events/intel/pt.c perf_aux_output_flag(&pt->handle, PERF_AUX_FLAG_PARTIAL);
pt 512 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 520 arch/x86/events/intel/pt.c if (!READ_ONCE(pt->vmx_on))
pt 800 arch/x86/events/intel/pt.c static void pt_update_head(struct pt *pt)
pt 802 arch/x86/events/intel/pt.c struct pt_buffer *buf = perf_get_aux(&pt->handle);
pt 846 arch/x86/events/intel/pt.c static void pt_handle_status(struct pt *pt)
pt 848 arch/x86/events/intel/pt.c struct pt_buffer *buf = perf_get_aux(&pt->handle);
pt 870 arch/x86/events/intel/pt.c perf_aux_output_flag(&pt->handle,
pt 1336 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 1338 arch/x86/events/intel/pt.c struct perf_event *event = pt->handle.event;
pt 1345 arch/x86/events/intel/pt.c if (!READ_ONCE(pt->handle_nmi))
pt 1353 arch/x86/events/intel/pt.c buf = perf_get_aux(&pt->handle);
pt 1359 arch/x86/events/intel/pt.c pt_handle_status(pt);
pt 1361 arch/x86/events/intel/pt.c pt_update_head(pt);
pt 1363 arch/x86/events/intel/pt.c perf_aux_output_end(&pt->handle, local_xchg(&buf->data_size, 0));
pt 1368 arch/x86/events/intel/pt.c buf = perf_aux_output_begin(&pt->handle, event);
pt 1374 arch/x86/events/intel/pt.c pt_buffer_reset_offsets(buf, pt->handle.head);
pt 1376 arch/x86/events/intel/pt.c ret = pt_buffer_reset_markers(buf, &pt->handle);
pt 1378 arch/x86/events/intel/pt.c perf_aux_output_end(&pt->handle, 0);
pt 1390 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 1405 arch/x86/events/intel/pt.c WRITE_ONCE(pt->vmx_on, on);
pt 1411 arch/x86/events/intel/pt.c event = pt->handle.event;
pt 1413 arch/x86/events/intel/pt.c perf_aux_output_flag(&pt->handle,
pt 1431 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 1434 arch/x86/events/intel/pt.c buf = perf_aux_output_begin(&pt->handle, event);
pt 1438 arch/x86/events/intel/pt.c pt_buffer_reset_offsets(buf, pt->handle.head);
pt 1440 arch/x86/events/intel/pt.c if (pt_buffer_reset_markers(buf, &pt->handle))
pt 1444 arch/x86/events/intel/pt.c WRITE_ONCE(pt->handle_nmi, 1);
pt 1454 arch/x86/events/intel/pt.c perf_aux_output_end(&pt->handle, 0);
pt 1461 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 1467 arch/x86/events/intel/pt.c WRITE_ONCE(pt->handle_nmi, 0);
pt 1477 arch/x86/events/intel/pt.c struct pt_buffer *buf = perf_get_aux(&pt->handle);
pt 1482 arch/x86/events/intel/pt.c if (WARN_ON_ONCE(pt->handle.event != event))
pt 1487 arch/x86/events/intel/pt.c pt_handle_status(pt);
pt 1489 arch/x86/events/intel/pt.c pt_update_head(pt);
pt 1492 arch/x86/events/intel/pt.c pt->handle.head =
pt 1495 arch/x86/events/intel/pt.c perf_aux_output_end(&pt->handle, local_xchg(&buf->data_size, 0));
pt 1506 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 1510 arch/x86/events/intel/pt.c if (pt->handle.event)
pt 1561 arch/x86/events/intel/pt.c struct pt *pt = this_cpu_ptr(&pt_ctx);
pt 1563 arch/x86/events/intel/pt.c if (pt->handle.event)
pt 1564 arch/x86/events/intel/pt.c pt_event_stop(pt->handle.event, PERF_EF_UPDATE);
pt 21 arch/x86/include/asm/vm86.h struct pt_regs pt;
pt 79 arch/x86/kernel/vm86_32.c #define AL(regs) (((unsigned char *)&((regs)->pt.ax))[0])
pt 80 arch/x86/kernel/vm86_32.c #define AH(regs) (((unsigned char *)&((regs)->pt.ax))[1])
pt 81 arch/x86/kernel/vm86_32.c #define IP(regs) (*(unsigned short *)&((regs)->pt.ip))
pt 82 arch/x86/kernel/vm86_32.c #define SP(regs) (*(unsigned short *)&((regs)->pt.sp))
pt 114 arch/x86/kernel/vm86_32.c set_flags(regs->pt.flags, VEFLAGS, X86_EFLAGS_VIF | vm86->veflags_mask);
pt 125 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.bx, &user->regs.ebx);
pt 126 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.cx, &user->regs.ecx);
pt 127 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.dx, &user->regs.edx);
pt 128 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.si, &user->regs.esi);
pt 129 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.di, &user->regs.edi);
pt 130 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.bp, &user->regs.ebp);
pt 131 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.ax, &user->regs.eax);
pt 132 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.ip, &user->regs.eip);
pt 133 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.cs, &user->regs.cs);
pt 134 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.flags, &user->regs.eflags);
pt 135 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.sp, &user->regs.esp);
pt 136 arch/x86/kernel/vm86_32.c put_user_ex(regs->pt.ss, &user->regs.ss);
pt 157 arch/x86/kernel/vm86_32.c memcpy(&regs->pt, &vm86->regs32, sizeof(struct pt_regs));
pt 161 arch/x86/kernel/vm86_32.c regs->pt.ax = retval;
pt 289 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.bx, &user_vm86->regs.ebx);
pt 290 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.cx, &user_vm86->regs.ecx);
pt 291 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.dx, &user_vm86->regs.edx);
pt 292 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.si, &user_vm86->regs.esi);
pt 293 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.di, &user_vm86->regs.edi);
pt 294 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.bp, &user_vm86->regs.ebp);
pt 295 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.ax, &user_vm86->regs.eax);
pt 296 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.ip, &user_vm86->regs.eip);
pt 298 arch/x86/kernel/vm86_32.c vm86regs.pt.cs = seg;
pt 299 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.flags, &user_vm86->regs.eflags);
pt 300 arch/x86/kernel/vm86_32.c get_user_ex(vm86regs.pt.sp, &user_vm86->regs.esp);
pt 302 arch/x86/kernel/vm86_32.c vm86regs.pt.ss = seg;
pt 340 arch/x86/kernel/vm86_32.c VEFLAGS = vm86regs.pt.flags;
pt 341 arch/x86/kernel/vm86_32.c vm86regs.pt.flags &= SAFE_MASK;
pt 342 arch/x86/kernel/vm86_32.c vm86regs.pt.flags |= regs->flags & ~SAFE_MASK;
pt 343 arch/x86/kernel/vm86_32.c vm86regs.pt.flags |= X86_VM_MASK;
pt 345 arch/x86/kernel/vm86_32.c vm86regs.pt.orig_ax = regs->orig_ax;
pt 400 arch/x86/kernel/vm86_32.c regs->pt.flags &= ~X86_EFLAGS_TF;
pt 405 arch/x86/kernel/vm86_32.c regs->pt.flags &= ~X86_EFLAGS_AC;
pt 423 arch/x86/kernel/vm86_32.c set_flags(regs->pt.flags, flags, SAFE_MASK);
pt 433 arch/x86/kernel/vm86_32.c set_flags(regs->pt.flags, flags, SAFE_MASK);
pt 442 arch/x86/kernel/vm86_32.c unsigned long flags = regs->pt.flags & RETURN_MASK;
pt 544 arch/x86/kernel/vm86_32.c if (regs->pt.cs == BIOSSEG)
pt 556 arch/x86/kernel/vm86_32.c pushw(ssp, sp, regs->pt.cs, cannot_handle);
pt 558 arch/x86/kernel/vm86_32.c regs->pt.cs = segoffs >> 16;
pt 579 arch/x86/kernel/vm86_32.c do_int(regs, trapno, (unsigned char __user *) (regs->pt.ss << 4), SP(regs));
pt 603 arch/x86/kernel/vm86_32.c orig_flags = *(unsigned short *)&regs->pt.flags;
pt 605 arch/x86/kernel/vm86_32.c csp = (unsigned char __user *) (regs->pt.cs << 4);
pt 606 arch/x86/kernel/vm86_32.c ssp = (unsigned char __user *) (regs->pt.ss << 4);
pt 695 arch/x86/kernel/vm86_32.c regs->pt.cs = newcs;
pt 270 arch/x86/kvm/i8254.c struct kvm_pit *pt = pit_state_to_pit(ps);
pt 275 arch/x86/kvm/i8254.c kthread_queue_work(pt->worker, &pt->expired);
pt 383 arch/x86/mm/dump_pagetables.c void *pt)
pt 385 arch/x86/mm/dump_pagetables.c if (__pa(pt) == __pa(kasan_early_shadow_pmd) ||
pt 387 arch/x86/mm/dump_pagetables.c __pa(pt) == __pa(kasan_early_shadow_p4d)) ||
pt 388 arch/x86/mm/dump_pagetables.c __pa(pt) == __pa(kasan_early_shadow_pud)) {
pt 397 arch/x86/mm/dump_pagetables.c void *pt)
pt 738 arch/x86/xen/mmu_pv.c void *pt = lowmem_page_address(page);
pt 769 arch/x86/xen/mmu_pv.c MULTI_update_va_mapping(mcs.mc, (unsigned long)pt,
pt 885 arch/x86/xen/mmu_pv.c void *pt = lowmem_page_address(page);
pt 906 arch/x86/xen/mmu_pv.c MULTI_update_va_mapping(mcs.mc, (unsigned long)pt,
pt 2061 arch/x86/xen/mmu_pv.c pte_t *pt;
pt 2104 arch/x86/xen/mmu_pv.c pt = early_memremap(pt_phys, PAGE_SIZE);
pt 2105 arch/x86/xen/mmu_pv.c clear_page(pt);
pt 2109 arch/x86/xen/mmu_pv.c pt[idx_pte] = pfn_pte(p2m_pfn,
pt 2114 arch/x86/xen/mmu_pv.c early_memunmap(pt, PAGE_SIZE);
pt 71 block/partitions/msdos.c struct partition *pt = (struct partition *) (p + 0x1be);
pt 82 block/partitions/msdos.c for (slot = 1; slot <= 4; slot++, pt++) {
pt 83 block/partitions/msdos.c if (pt->sys_ind == LINUX_SWAP_PARTITION ||
pt 84 block/partitions/msdos.c pt->sys_ind == LINUX_RAID_PARTITION ||
pt 85 block/partitions/msdos.c pt->sys_ind == LINUX_DATA_PARTITION ||
pt 86 block/partitions/msdos.c pt->sys_ind == LINUX_LVM_PARTITION ||
pt 87 block/partitions/msdos.c is_extended_partition(pt))
pt 237 drivers/atm/nicstar.h #define ns_tbd_mkword_4(gfc, vpi, vci, pt, clp) \
pt 238 drivers/atm/nicstar.h (cpu_to_le32((gfc) << 28 | (vpi) << 20 | (vci) << 4 | (pt) << 1 | (clp)))
pt 131 drivers/block/aoe/aoenet.c aoenet_rcv(struct sk_buff *skb, struct net_device *ifp, struct packet_type *pt, struct net_device *orig_dev)
pt 232 drivers/block/paride/pt.c static struct pt_unit pt[PT_UNITS];
pt 617 drivers/block/paride/pt.c struct pt_unit *tape = &pt[unit];
pt 640 drivers/block/paride/pt.c tape = pt;
pt 662 drivers/block/paride/pt.c struct pt_unit *tape = pt + unit;
pt 981 drivers/block/paride/pt.c if (pt[unit].present)
pt 982 drivers/block/paride/pt.c pi_release(pt[unit].pi);
pt 993 drivers/block/paride/pt.c if (pt[unit].present) {
pt 1011 drivers/block/paride/pt.c if (pt[unit].present) {
pt 1018 drivers/block/paride/pt.c if (pt[unit].present)
pt 1019 drivers/block/paride/pt.c pi_release(pt[unit].pi);
pt 125 drivers/crypto/ccp/ccp-dev-v5.c } pt;
pt 150 drivers/crypto/ccp/ccp-dev-v5.c #define CCP_PT_BYTESWAP(p) ((p)->pt.byteswap)
pt 151 drivers/crypto/ccp/ccp-dev-v5.c #define CCP_PT_BITWISE(p) ((p)->pt.bitwise)
pt 1949 drivers/crypto/ccp/ccp-ops.c struct ccp_passthru_engine *pt = &cmd->u.passthru;
pt 1957 drivers/crypto/ccp/ccp-ops.c if (!pt->final && (pt->src_len & (CCP_PASSTHRU_BLOCKSIZE - 1)))
pt 1960 drivers/crypto/ccp/ccp-ops.c if (!pt->src || !pt->dst)
pt 1963 drivers/crypto/ccp/ccp-ops.c if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) {
pt 1964 drivers/crypto/ccp/ccp-ops.c if (pt->mask_len != CCP_PASSTHRU_MASKSIZE)
pt 1966 drivers/crypto/ccp/ccp-ops.c if (!pt->mask)
pt 1976 drivers/crypto/ccp/ccp-ops.c if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) {
pt 1987 drivers/crypto/ccp/ccp-ops.c ret = ccp_set_dm_area(&mask, 0, pt->mask, 0, pt->mask_len);
pt 2002 drivers/crypto/ccp/ccp-ops.c if (sg_virt(pt->src) == sg_virt(pt->dst))
pt 2005 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&src, cmd_q, pt->src, pt->src_len,
pt 2014 drivers/crypto/ccp/ccp-ops.c ret = ccp_init_data(&dst, cmd_q, pt->dst, pt->src_len,
pt 2072 drivers/crypto/ccp/ccp-ops.c if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP)
pt 2082 drivers/crypto/ccp/ccp-ops.c struct ccp_passthru_nomap_engine *pt = &cmd->u.passthru_nomap;
pt 2087 drivers/crypto/ccp/ccp-ops.c if (!pt->final && (pt->src_len & (CCP_PASSTHRU_BLOCKSIZE - 1)))
pt 2090 drivers/crypto/ccp/ccp-ops.c if (!pt->src_dma || !pt->dst_dma)
pt 2093 drivers/crypto/ccp/ccp-ops.c if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) {
pt 2094 drivers/crypto/ccp/ccp-ops.c if (pt->mask_len != CCP_PASSTHRU_MASKSIZE)
pt 2096 drivers/crypto/ccp/ccp-ops.c if (!pt->mask)
pt 2106 drivers/crypto/ccp/ccp-ops.c if (pt->bit_mod != CCP_PASSTHRU_BITWISE_NOOP) {
pt 2110 drivers/crypto/ccp/ccp-ops.c mask.length = pt->mask_len;
pt 2111 drivers/crypto/ccp/ccp-ops.c mask.dma.address = pt->mask;
pt 2112 drivers/crypto/ccp/ccp-ops.c mask.dma.length = pt->mask_len;
pt 2127 drivers/crypto/ccp/ccp-ops.c op.src.u.dma.address = pt->src_dma;
pt 2129 drivers/crypto/ccp/ccp-ops.c op.src.u.dma.length = pt->src_len;
pt 2132 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.address = pt->dst_dma;
pt 2134 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.length = pt->src_len;
pt 133 drivers/dma-buf/sw_sync.c struct sync_pt *pt = dma_fence_to_sync_pt(fence);
pt 138 drivers/dma-buf/sw_sync.c if (!list_empty(&pt->link)) {
pt 139 drivers/dma-buf/sw_sync.c list_del(&pt->link);
pt 140 drivers/dma-buf/sw_sync.c rb_erase(&pt->node, &parent->pt_tree);
pt 194 drivers/dma-buf/sw_sync.c struct sync_pt *pt, *next;
pt 202 drivers/dma-buf/sw_sync.c list_for_each_entry_safe(pt, next, &obj->pt_list, link) {
pt 203 drivers/dma-buf/sw_sync.c if (!timeline_fence_signaled(&pt->base))
pt 206 drivers/dma-buf/sw_sync.c list_del_init(&pt->link);
pt 207 drivers/dma-buf/sw_sync.c rb_erase(&pt->node, &obj->pt_tree);
pt 217 drivers/dma-buf/sw_sync.c dma_fence_signal_locked(&pt->base);
pt 236 drivers/dma-buf/sw_sync.c struct sync_pt *pt;
pt 238 drivers/dma-buf/sw_sync.c pt = kzalloc(sizeof(*pt), GFP_KERNEL);
pt 239 drivers/dma-buf/sw_sync.c if (!pt)
pt 243 drivers/dma-buf/sw_sync.c dma_fence_init(&pt->base, &timeline_fence_ops, &obj->lock,
pt 245 drivers/dma-buf/sw_sync.c INIT_LIST_HEAD(&pt->link);
pt 248 drivers/dma-buf/sw_sync.c if (!dma_fence_is_signaled_locked(&pt->base)) {
pt 257 drivers/dma-buf/sw_sync.c other = rb_entry(parent, typeof(*pt), node);
pt 266 drivers/dma-buf/sw_sync.c kfree(pt);
pt 267 drivers/dma-buf/sw_sync.c pt = other;
pt 273 drivers/dma-buf/sw_sync.c rb_link_node(&pt->node, parent, p);
pt 274 drivers/dma-buf/sw_sync.c rb_insert_color(&pt->node, &obj->pt_tree);
pt 276 drivers/dma-buf/sw_sync.c parent = rb_next(&pt->node);
pt 277 drivers/dma-buf/sw_sync.c list_add_tail(&pt->link,
pt 278 drivers/dma-buf/sw_sync.c parent ? &rb_entry(parent, typeof(*pt), node)->link : &obj->pt_list);
pt 283 drivers/dma-buf/sw_sync.c return pt;
pt 312 drivers/dma-buf/sw_sync.c struct sync_pt *pt, *next;
pt 316 drivers/dma-buf/sw_sync.c list_for_each_entry_safe(pt, next, &obj->pt_list, link) {
pt 317 drivers/dma-buf/sw_sync.c dma_fence_set_error(&pt->base, -ENOENT);
pt 318 drivers/dma-buf/sw_sync.c dma_fence_signal_locked(&pt->base);
pt 332 drivers/dma-buf/sw_sync.c struct sync_pt *pt;
pt 344 drivers/dma-buf/sw_sync.c pt = sync_pt_create(obj, data.value);
pt 345 drivers/dma-buf/sw_sync.c if (!pt) {
pt 350 drivers/dma-buf/sw_sync.c sync_file = sync_file_create(&pt->base);
pt 351 drivers/dma-buf/sw_sync.c dma_fence_put(&pt->base);
pt 115 drivers/dma-buf/sync_debug.c struct sync_pt *pt = container_of(pos, struct sync_pt, link);
pt 116 drivers/dma-buf/sync_debug.c sync_print_fence(s, &pt->base, false);
pt 1774 drivers/firewire/core-cdev.c static __poll_t fw_device_op_poll(struct file *file, poll_table * pt)
pt 1779 drivers/firewire/core-cdev.c poll_wait(file, &client->wait, pt);
pt 319 drivers/firewire/nosy.c nosy_poll(struct file *file, poll_table *pt)
pt 324 drivers/firewire/nosy.c poll_wait(file, &client->buffer.wait, pt);
pt 667 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c ATOM_PowerTune_Table *pt;
pt 680 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c pt = &ppt->power_tune_table;
pt 686 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c pt = &ppt->power_tune_table;
pt 688 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c adev->pm.dpm.dyn_state.cac_tdp_table->tdp = le16_to_cpu(pt->usTDP);
pt 690 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c le16_to_cpu(pt->usConfigurableTDP);
pt 691 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c adev->pm.dpm.dyn_state.cac_tdp_table->tdc = le16_to_cpu(pt->usTDC);
pt 693 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c le16_to_cpu(pt->usBatteryPowerLimit);
pt 695 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c le16_to_cpu(pt->usSmallPowerLimit);
pt 697 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c le16_to_cpu(pt->usLowCACLeakage);
pt 699 drivers/gpu/drm/amd/amdgpu/amdgpu_dpm.c le16_to_cpu(pt->usHighCACLeakage);
pt 334 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c static struct amdgpu_vm_pt *amdgpu_vm_pt_parent(struct amdgpu_vm_pt *pt)
pt 336 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_bo *parent = pt->base.bo->parent;
pt 733 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_vm_pt *pt;
pt 735 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c pt = container_of(ancestor->vm_bo, struct amdgpu_vm_pt, base);
pt 737 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c if ((pt - vm->root.entries) >= ats_entries) {
pt 857 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_bo *pt;
pt 876 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_bo_create(adev, &bp, &pt);
pt 883 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c pt->parent = amdgpu_bo_ref(cursor->parent->base.bo);
pt 884 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_bo_base_init(&entry->base, vm, pt);
pt 886 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c r = amdgpu_vm_clear_bo(adev, vm, pt);
pt 893 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unref(&pt->shadow);
pt 894 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_bo_unref(&pt);
pt 1196 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c uint64_t pde, pt, flags;
pt 1203 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_gmc_get_pde_for_bo(entry->base.bo, level, &pt, &flags);
pt 1205 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c return vm->update_funcs->update(params, bo, pde, pt, 1, 0, flags);
pt 1398 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c struct amdgpu_bo *pt;
pt 1404 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c pt = cursor.entry->base.bo;
pt 1454 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c amdgpu_vm_update_flags(params, pt, cursor.level,
pt 2242 drivers/gpu/drm/drm_edid.c struct detailed_pixel_timing *pt)
pt 2257 drivers/gpu/drm/drm_edid.c if (!(pt->misc & DRM_EDID_PT_INTERLACED))
pt 2290 drivers/gpu/drm/drm_edid.c struct detailed_pixel_timing *pt = &timing->data.pixel_data;
pt 2291 drivers/gpu/drm/drm_edid.c unsigned hactive = (pt->hactive_hblank_hi & 0xf0) << 4 | pt->hactive_lo;
pt 2292 drivers/gpu/drm/drm_edid.c unsigned vactive = (pt->vactive_vblank_hi & 0xf0) << 4 | pt->vactive_lo;
pt 2293 drivers/gpu/drm/drm_edid.c unsigned hblank = (pt->hactive_hblank_hi & 0xf) << 8 | pt->hblank_lo;
pt 2294 drivers/gpu/drm/drm_edid.c unsigned vblank = (pt->vactive_vblank_hi & 0xf) << 8 | pt->vblank_lo;
pt 2295 drivers/gpu/drm/drm_edid.c unsigned hsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc0) << 2 | pt->hsync_offset_lo;
pt 2296 drivers/gpu/drm/drm_edid.c unsigned hsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x30) << 4 | pt->hsync_pulse_width_lo;
pt 2297 drivers/gpu/drm/drm_edid.c unsigned vsync_offset = (pt->hsync_vsync_offset_pulse_width_hi & 0xc) << 2 | pt->vsync_offset_pulse_width_lo >> 4;
pt 2298 drivers/gpu/drm/drm_edid.c unsigned vsync_pulse_width = (pt->hsync_vsync_offset_pulse_width_hi & 0x3) << 4 | (pt->vsync_offset_pulse_width_lo & 0xf);
pt 2304 drivers/gpu/drm/drm_edid.c if (pt->misc & DRM_EDID_PT_STEREO) {
pt 2308 drivers/gpu/drm/drm_edid.c if (!(pt->misc & DRM_EDID_PT_SEPARATE_SYNC)) {
pt 2352 drivers/gpu/drm/drm_edid.c drm_mode_do_interlace_quirk(mode, pt);
pt 2355 drivers/gpu/drm/drm_edid.c pt->misc |= DRM_EDID_PT_HSYNC_POSITIVE | DRM_EDID_PT_VSYNC_POSITIVE;
pt 2358 drivers/gpu/drm/drm_edid.c mode->flags |= (pt->misc & DRM_EDID_PT_HSYNC_POSITIVE) ?
pt 2360 drivers/gpu/drm/drm_edid.c mode->flags |= (pt->misc & DRM_EDID_PT_VSYNC_POSITIVE) ?
pt 2364 drivers/gpu/drm/drm_edid.c mode->width_mm = pt->width_mm_lo | (pt->width_height_mm_hi & 0xf0) << 4;
pt 2365 drivers/gpu/drm/drm_edid.c mode->height_mm = pt->height_mm_lo | (pt->width_height_mm_hi & 0xf) << 8;
pt 441 drivers/gpu/drm/drm_mode_config.c struct drm_property *property, *pt;
pt 468 drivers/gpu/drm/drm_mode_config.c list_for_each_entry_safe(property, pt, &dev->mode_config.property_list,
pt 1369 drivers/gpu/drm/drm_modes.c struct drm_display_mode *pmode, *pt;
pt 1373 drivers/gpu/drm/drm_modes.c list_for_each_entry_safe(pmode, pt, &connector->probed_modes, head) {
pt 444 drivers/gpu/drm/drm_property.c struct drm_property_enum *prop_enum, *pt;
pt 446 drivers/gpu/drm/drm_property.c list_for_each_entry_safe(prop_enum, pt, &property->enum_list, head) {
pt 240 drivers/gpu/drm/drm_vm.c struct drm_vma_entry *pt, *temp;
pt 251 drivers/gpu/drm/drm_vm.c list_for_each_entry_safe(pt, temp, &dev->vmalist, head) {
pt 252 drivers/gpu/drm/drm_vm.c if (pt->vma->vm_private_data == map)
pt 254 drivers/gpu/drm/drm_vm.c if (pt->vma == vma) {
pt 255 drivers/gpu/drm/drm_vm.c list_del(&pt->head);
pt 256 drivers/gpu/drm/drm_vm.c kfree(pt);
pt 426 drivers/gpu/drm/drm_vm.c struct drm_vma_entry *pt, *temp;
pt 431 drivers/gpu/drm/drm_vm.c list_for_each_entry_safe(pt, temp, &dev->vmalist, head) {
pt 432 drivers/gpu/drm/drm_vm.c if (pt->vma == vma) {
pt 433 drivers/gpu/drm/drm_vm.c list_del(&pt->head);
pt 434 drivers/gpu/drm/drm_vm.c kfree(pt);
pt 179 drivers/gpu/drm/gma500/mid_bios.c dev_priv->gct_data.pt = gct.PD.PanelType;
pt 207 drivers/gpu/drm/gma500/mid_bios.c dev_priv->gct_data.pt = gct.PD.PanelType;
pt 231 drivers/gpu/drm/gma500/mmu.c static void psb_mmu_free_pt(struct psb_mmu_pt *pt)
pt 233 drivers/gpu/drm/gma500/mmu.c __free_page(pt->p);
pt 234 drivers/gpu/drm/gma500/mmu.c kfree(pt);
pt 242 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 255 drivers/gpu/drm/gma500/mmu.c pt = pd->tables[i];
pt 256 drivers/gpu/drm/gma500/mmu.c if (pt)
pt 257 drivers/gpu/drm/gma500/mmu.c psb_mmu_free_pt(pt);
pt 270 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt = kmalloc(sizeof(*pt), GFP_KERNEL);
pt 279 drivers/gpu/drm/gma500/mmu.c if (!pt)
pt 282 drivers/gpu/drm/gma500/mmu.c pt->p = alloc_page(GFP_DMA32);
pt 283 drivers/gpu/drm/gma500/mmu.c if (!pt->p) {
pt 284 drivers/gpu/drm/gma500/mmu.c kfree(pt);
pt 290 drivers/gpu/drm/gma500/mmu.c v = kmap_atomic(pt->p);
pt 309 drivers/gpu/drm/gma500/mmu.c pt->count = 0;
pt 310 drivers/gpu/drm/gma500/mmu.c pt->pd = pd;
pt 311 drivers/gpu/drm/gma500/mmu.c pt->index = 0;
pt 313 drivers/gpu/drm/gma500/mmu.c return pt;
pt 320 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 325 drivers/gpu/drm/gma500/mmu.c pt = pd->tables[index];
pt 326 drivers/gpu/drm/gma500/mmu.c while (!pt) {
pt 328 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_alloc_pt(pd);
pt 329 drivers/gpu/drm/gma500/mmu.c if (!pt)
pt 335 drivers/gpu/drm/gma500/mmu.c psb_mmu_free_pt(pt);
pt 337 drivers/gpu/drm/gma500/mmu.c pt = pd->tables[index];
pt 342 drivers/gpu/drm/gma500/mmu.c pd->tables[index] = pt;
pt 343 drivers/gpu/drm/gma500/mmu.c v[index] = (page_to_pfn(pt->p) << 12) | pd->pd_mask;
pt 344 drivers/gpu/drm/gma500/mmu.c pt->index = index;
pt 352 drivers/gpu/drm/gma500/mmu.c pt->v = kmap_atomic(pt->p);
pt 353 drivers/gpu/drm/gma500/mmu.c return pt;
pt 360 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 364 drivers/gpu/drm/gma500/mmu.c pt = pd->tables[index];
pt 365 drivers/gpu/drm/gma500/mmu.c if (!pt) {
pt 369 drivers/gpu/drm/gma500/mmu.c pt->v = kmap_atomic(pt->p);
pt 370 drivers/gpu/drm/gma500/mmu.c return pt;
pt 373 drivers/gpu/drm/gma500/mmu.c static void psb_mmu_pt_unmap_unlock(struct psb_mmu_pt *pt)
pt 375 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pd *pd = pt->pd;
pt 378 drivers/gpu/drm/gma500/mmu.c kunmap_atomic(pt->v);
pt 379 drivers/gpu/drm/gma500/mmu.c if (pt->count == 0) {
pt 381 drivers/gpu/drm/gma500/mmu.c v[pt->index] = pd->invalid_pde;
pt 382 drivers/gpu/drm/gma500/mmu.c pd->tables[pt->index] = NULL;
pt 385 drivers/gpu/drm/gma500/mmu.c psb_mmu_clflush(pd->driver, (void *)&v[pt->index]);
pt 390 drivers/gpu/drm/gma500/mmu.c psb_mmu_free_pt(pt);
pt 396 drivers/gpu/drm/gma500/mmu.c static inline void psb_mmu_set_pte(struct psb_mmu_pt *pt, unsigned long addr,
pt 399 drivers/gpu/drm/gma500/mmu.c pt->v[psb_mmu_pt_index(addr)] = pte;
pt 402 drivers/gpu/drm/gma500/mmu.c static inline void psb_mmu_invalidate_pte(struct psb_mmu_pt *pt,
pt 405 drivers/gpu/drm/gma500/mmu.c pt->v[psb_mmu_pt_index(addr)] = pt->pd->invalid_pte;
pt 503 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 532 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_pt_map_lock(pd, addr);
pt 533 drivers/gpu/drm/gma500/mmu.c if (!pt)
pt 536 drivers/gpu/drm/gma500/mmu.c psb_clflush(&pt->v[psb_mmu_pt_index(addr)]);
pt 540 drivers/gpu/drm/gma500/mmu.c psb_mmu_pt_unmap_unlock(pt);
pt 558 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 571 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_pt_alloc_map_lock(pd, addr);
pt 572 drivers/gpu/drm/gma500/mmu.c if (!pt)
pt 575 drivers/gpu/drm/gma500/mmu.c psb_mmu_invalidate_pte(pt, addr);
pt 576 drivers/gpu/drm/gma500/mmu.c --pt->count;
pt 578 drivers/gpu/drm/gma500/mmu.c psb_mmu_pt_unmap_unlock(pt);
pt 598 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 627 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_pt_map_lock(pd, addr);
pt 628 drivers/gpu/drm/gma500/mmu.c if (!pt)
pt 631 drivers/gpu/drm/gma500/mmu.c psb_mmu_invalidate_pte(pt, addr);
pt 632 drivers/gpu/drm/gma500/mmu.c --pt->count;
pt 635 drivers/gpu/drm/gma500/mmu.c psb_mmu_pt_unmap_unlock(pt);
pt 654 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 669 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_pt_alloc_map_lock(pd, addr);
pt 670 drivers/gpu/drm/gma500/mmu.c if (!pt) {
pt 676 drivers/gpu/drm/gma500/mmu.c psb_mmu_set_pte(pt, addr, pte);
pt 677 drivers/gpu/drm/gma500/mmu.c pt->count++;
pt 679 drivers/gpu/drm/gma500/mmu.c psb_mmu_pt_unmap_unlock(pt);
pt 701 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 733 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_pt_alloc_map_lock(pd, addr);
pt 734 drivers/gpu/drm/gma500/mmu.c if (!pt)
pt 739 drivers/gpu/drm/gma500/mmu.c psb_mmu_set_pte(pt, addr, pte);
pt 740 drivers/gpu/drm/gma500/mmu.c pt->count++;
pt 742 drivers/gpu/drm/gma500/mmu.c psb_mmu_pt_unmap_unlock(pt);
pt 767 drivers/gpu/drm/gma500/mmu.c struct psb_mmu_pt *pt;
pt 772 drivers/gpu/drm/gma500/mmu.c pt = psb_mmu_pt_map_lock(pd, virtual);
pt 773 drivers/gpu/drm/gma500/mmu.c if (!pt) {
pt 791 drivers/gpu/drm/gma500/mmu.c tmp = pt->v[psb_mmu_pt_index(virtual)];
pt 798 drivers/gpu/drm/gma500/mmu.c psb_mmu_pt_unmap_unlock(pt);
pt 192 drivers/gpu/drm/gma500/oaktrail.h u8 pt; /* panel type, 4 bit field, 0=lvds, 1=mipi */
pt 300 drivers/gpu/drm/i915/gvt/gtt.c static inline int gtt_get_entry64(void *pt,
pt 317 drivers/gpu/drm/i915/gvt/gtt.c } else if (!pt) {
pt 320 drivers/gpu/drm/i915/gvt/gtt.c e->val64 = *((u64 *)pt + index);
pt 325 drivers/gpu/drm/i915/gvt/gtt.c static inline int gtt_set_entry64(void *pt,
pt 342 drivers/gpu/drm/i915/gvt/gtt.c } else if (!pt) {
pt 345 drivers/gpu/drm/i915/gvt/gtt.c *((u64 *)pt + index) = e->val64;
pt 49 drivers/gpu/drm/i915/gvt/gtt.h int (*get_entry)(void *pt,
pt 55 drivers/gpu/drm/i915/gvt/gtt.h int (*set_entry)(void *pt,
pt 64 drivers/gpu/drm/i915/gvt/mmio.c void *pt = NULL;
pt 82 drivers/gpu/drm/i915/gvt/mmio.c pt = vgpu->gtt.ggtt_mm->ggtt_mm.virtual_ggtt + offset;
pt 84 drivers/gpu/drm/i915/gvt/mmio.c memcpy(p_data, pt, bytes);
pt 86 drivers/gpu/drm/i915/gvt/mmio.c memcpy(pt, p_data, bytes);
pt 116 drivers/gpu/drm/i915/i915_gem_gtt.c #define as_pd(x) container_of((x), typeof(struct i915_page_directory), pt)
pt 707 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt;
pt 709 drivers/gpu/drm/i915/i915_gem_gtt.c pt = kmalloc(sizeof(*pt), I915_GFP_ALLOW_FAIL);
pt 710 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(!pt))
pt 713 drivers/gpu/drm/i915/i915_gem_gtt.c if (unlikely(setup_page_dma(vm, &pt->base))) {
pt 714 drivers/gpu/drm/i915/i915_gem_gtt.c kfree(pt);
pt 718 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_set(&pt->used, 0);
pt 719 drivers/gpu/drm/i915/i915_gem_gtt.c return pt;
pt 801 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table * const pt,
pt 806 drivers/gpu/drm/i915/i915_gem_gtt.c if (atomic_add_unless(&pt->used, -1, 1))
pt 810 drivers/gpu/drm/i915/i915_gem_gtt.c if (atomic_dec_and_test(&pt->used)) {
pt 979 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt = pd->entry[idx];
pt 981 drivers/gpu/drm/i915/i915_gem_gtt.c if (atomic_fetch_inc(&pt->used) >> gen8_pd_shift(1) &&
pt 986 drivers/gpu/drm/i915/i915_gem_gtt.c __gen8_ppgtt_cleanup(vm, as_pd(pt), I915_PDES, lvl);
pt 992 drivers/gpu/drm/i915/i915_gem_gtt.c start = __gen8_ppgtt_clear(vm, as_pd(pt),
pt 1002 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_read(&pt->used));
pt 1003 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!count || count >= atomic_read(&pt->used));
pt 1005 drivers/gpu/drm/i915/i915_gem_gtt.c vaddr = kmap_atomic_px(pt);
pt 1011 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_sub(count, &pt->used);
pt 1015 drivers/gpu/drm/i915/i915_gem_gtt.c if (release_pd_entry(pd, idx, pt, scratch))
pt 1016 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, pt);
pt 1057 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt = pd->entry[idx];
pt 1059 drivers/gpu/drm/i915/i915_gem_gtt.c if (!pt) {
pt 1065 drivers/gpu/drm/i915/i915_gem_gtt.c pt = fetch_and_zero(&alloc);
pt 1067 drivers/gpu/drm/i915/i915_gem_gtt.c if (!pt) {
pt 1068 drivers/gpu/drm/i915/i915_gem_gtt.c pt = &alloc_pd(vm)->pt;
pt 1069 drivers/gpu/drm/i915/i915_gem_gtt.c if (IS_ERR(pt)) {
pt 1070 drivers/gpu/drm/i915/i915_gem_gtt.c ret = PTR_ERR(pt);
pt 1075 drivers/gpu/drm/i915/i915_gem_gtt.c fill_px(pt, vm->scratch[lvl].encode);
pt 1077 drivers/gpu/drm/i915/i915_gem_gtt.c if (!pt) {
pt 1078 drivers/gpu/drm/i915/i915_gem_gtt.c pt = alloc_pt(vm);
pt 1079 drivers/gpu/drm/i915/i915_gem_gtt.c if (IS_ERR(pt)) {
pt 1080 drivers/gpu/drm/i915/i915_gem_gtt.c ret = PTR_ERR(pt);
pt 1087 drivers/gpu/drm/i915/i915_gem_gtt.c fill_px(pt, vm->scratch[lvl].encode);
pt 1092 drivers/gpu/drm/i915/i915_gem_gtt.c set_pd_entry(pd, idx, pt);
pt 1094 drivers/gpu/drm/i915/i915_gem_gtt.c alloc = pt, pt = pd->entry[idx];
pt 1098 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_inc(&pt->used);
pt 1101 drivers/gpu/drm/i915/i915_gem_gtt.c ret = __gen8_ppgtt_alloc(vm, as_pd(pt),
pt 1104 drivers/gpu/drm/i915/i915_gem_gtt.c if (release_pd_entry(pd, idx, pt, scratch))
pt 1105 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(vm, pt);
pt 1110 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_dec(&pt->used);
pt 1111 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(!atomic_read(&pt->used));
GEM_BUG_ON(!atomic_read(&pt->used)); pt 1118 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_read(&pt->used)); pt 1120 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_add(count, &pt->used); pt 1122 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(atomic_read(&pt->used) > 2 * I915_PDES); pt 1246 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt = pt 1259 drivers/gpu/drm/i915/i915_gem_gtt.c vaddr = kmap_atomic_px(pt); pt 1544 drivers/gpu/drm/i915/i915_gem_gtt.c const struct i915_page_table *pt) pt 1547 drivers/gpu/drm/i915/i915_gem_gtt.c iowrite32(GEN6_PDE_ADDR_ENCODE(px_dma(pt)) | GEN6_PDE_VALID, pt 1615 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table * const pt = pt 1620 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(px_base(pt) == px_base(&vm->scratch[1])); pt 1624 drivers/gpu/drm/i915/i915_gem_gtt.c GEM_BUG_ON(count > atomic_read(&pt->used)); pt 1625 drivers/gpu/drm/i915/i915_gem_gtt.c if (!atomic_sub_return(count, &pt->used)) pt 1635 drivers/gpu/drm/i915/i915_gem_gtt.c vaddr = kmap_atomic_px(pt); pt 1690 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt, *alloc = NULL; pt 1700 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_for_each_pde(pt, pd, start, length, pde) { pt 1703 drivers/gpu/drm/i915/i915_gem_gtt.c if (px_base(pt) == px_base(&vm->scratch[1])) { pt 1706 drivers/gpu/drm/i915/i915_gem_gtt.c pt = fetch_and_zero(&alloc); pt 1707 drivers/gpu/drm/i915/i915_gem_gtt.c if (!pt) pt 1708 drivers/gpu/drm/i915/i915_gem_gtt.c pt = alloc_pt(vm); pt 1709 drivers/gpu/drm/i915/i915_gem_gtt.c if (IS_ERR(pt)) { pt 1710 drivers/gpu/drm/i915/i915_gem_gtt.c ret = PTR_ERR(pt); pt 1714 drivers/gpu/drm/i915/i915_gem_gtt.c fill32_px(pt, vm->scratch[0].encode); pt 1718 drivers/gpu/drm/i915/i915_gem_gtt.c pd->entry[pde] = pt; pt 1721 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_write_pde(ppgtt, pde, pt); pt 1725 drivers/gpu/drm/i915/i915_gem_gtt.c alloc = pt; pt 1726 drivers/gpu/drm/i915/i915_gem_gtt.c pt = pd->entry[pde]; pt 1730 drivers/gpu/drm/i915/i915_gem_gtt.c atomic_add(count, &pt->used); pt 1780 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt; pt 1783 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_for_all_pdes(pt, pd, pde) pt 1784 drivers/gpu/drm/i915/i915_gem_gtt.c if (px_base(pt) != scratch) pt 1785 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(&ppgtt->base.vm, pt); pt 1823 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt; pt 1829 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_for_all_pdes(pt, ppgtt->base.pd, pde) pt 1830 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_write_pde(ppgtt, pde, pt); pt 1844 drivers/gpu/drm/i915/i915_gem_gtt.c struct i915_page_table *pt; pt 1851 drivers/gpu/drm/i915/i915_gem_gtt.c gen6_for_all_pdes(pt, ppgtt->base.pd, pde) { pt 1852 drivers/gpu/drm/i915/i915_gem_gtt.c if (px_base(pt) == scratch || atomic_read(&pt->used)) pt 1855 drivers/gpu/drm/i915/i915_gem_gtt.c free_px(&ppgtt->base.vm, pt); pt 242 drivers/gpu/drm/i915/i915_gem_gtt.h struct i915_page_table pt; pt 258 drivers/gpu/drm/i915/i915_gem_gtt.h __px_choose_expr(px, struct i915_page_directory *, &__x->pt.base, \ pt 264 drivers/gpu/drm/i915/i915_gem_gtt.h __px_choose_expr(px, struct i915_page_directory *, &__x->pt, \ pt 455 drivers/gpu/drm/i915/i915_gem_gtt.h #define gen6_for_each_pde(pt, pd, start, length, iter) \ pt 458 drivers/gpu/drm/i915/i915_gem_gtt.h (pt = i915_pt_entry(pd, iter), true); \ pt 463 drivers/gpu/drm/i915/i915_gem_gtt.h #define gen6_for_all_pdes(pt, pd, iter) \ pt 466 drivers/gpu/drm/i915/i915_gem_gtt.h (pt = i915_pt_entry(pd, iter), true); \ pt 533 
drivers/gpu/drm/i915/i915_gem_gtt.h struct i915_page_dma *pt = ppgtt->pd->entry[n]; pt 535 drivers/gpu/drm/i915/i915_gem_gtt.h return px_dma(pt ?: px_base(&ppgtt->vm.scratch[ppgtt->vm.top])); pt 41 drivers/gpu/drm/i915/selftests/scatterlist.c static noinline int expect_pfn_sg(struct pfn_table *pt, pt 50 drivers/gpu/drm/i915/selftests/scatterlist.c pfn = pt->start; pt 51 drivers/gpu/drm/i915/selftests/scatterlist.c for_each_sg(pt->st.sgl, sg, pt->st.nents, n) { pt 53 drivers/gpu/drm/i915/selftests/scatterlist.c unsigned int npages = npages_fn(n, pt->st.nents, rnd); pt 72 drivers/gpu/drm/i915/selftests/scatterlist.c if (pfn != pt->end) { pt 74 drivers/gpu/drm/i915/selftests/scatterlist.c __func__, who, pt->end, pfn); pt 81 drivers/gpu/drm/i915/selftests/scatterlist.c static noinline int expect_pfn_sg_page_iter(struct pfn_table *pt, pt 88 drivers/gpu/drm/i915/selftests/scatterlist.c pfn = pt->start; pt 89 drivers/gpu/drm/i915/selftests/scatterlist.c for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) { pt 103 drivers/gpu/drm/i915/selftests/scatterlist.c if (pfn != pt->end) { pt 105 drivers/gpu/drm/i915/selftests/scatterlist.c __func__, who, pt->end, pfn); pt 112 drivers/gpu/drm/i915/selftests/scatterlist.c static noinline int expect_pfn_sgtiter(struct pfn_table *pt, pt 120 drivers/gpu/drm/i915/selftests/scatterlist.c pfn = pt->start; pt 121 drivers/gpu/drm/i915/selftests/scatterlist.c for_each_sgt_page(page, sgt, &pt->st) { pt 133 drivers/gpu/drm/i915/selftests/scatterlist.c if (pfn != pt->end) { pt 135 drivers/gpu/drm/i915/selftests/scatterlist.c __func__, who, pt->end, pfn); pt 142 drivers/gpu/drm/i915/selftests/scatterlist.c static int expect_pfn_sgtable(struct pfn_table *pt, pt 150 drivers/gpu/drm/i915/selftests/scatterlist.c err = expect_pfn_sg(pt, npages_fn, rnd, who, timeout); pt 154 drivers/gpu/drm/i915/selftests/scatterlist.c err = expect_pfn_sg_page_iter(pt, who, timeout); pt 158 drivers/gpu/drm/i915/selftests/scatterlist.c err = expect_pfn_sgtiter(pt, who, timeout); pt 214 drivers/gpu/drm/i915/selftests/scatterlist.c static int alloc_table(struct pfn_table *pt, pt 223 drivers/gpu/drm/i915/selftests/scatterlist.c if (sg_alloc_table(&pt->st, max, pt 234 drivers/gpu/drm/i915/selftests/scatterlist.c pt->start = PFN_BIAS; pt 235 drivers/gpu/drm/i915/selftests/scatterlist.c pfn = pt->start; pt 236 drivers/gpu/drm/i915/selftests/scatterlist.c sg = pt->st.sgl; pt 244 drivers/gpu/drm/i915/selftests/scatterlist.c sg_free_table(&pt->st); pt 259 drivers/gpu/drm/i915/selftests/scatterlist.c pt->st.nents = n; pt 260 drivers/gpu/drm/i915/selftests/scatterlist.c pt->end = pfn; pt 289 drivers/gpu/drm/i915/selftests/scatterlist.c struct pfn_table pt; pt 295 drivers/gpu/drm/i915/selftests/scatterlist.c err = alloc_table(&pt, sz, sz, *npages, &prng, pt 304 drivers/gpu/drm/i915/selftests/scatterlist.c err = expect_pfn_sgtable(&pt, *npages, &prng, pt 307 drivers/gpu/drm/i915/selftests/scatterlist.c sg_free_table(&pt.st); pt 325 drivers/gpu/drm/i915/selftests/scatterlist.c struct pfn_table pt; pt 337 drivers/gpu/drm/i915/selftests/scatterlist.c err = alloc_table(&pt, prime, max, *npages, &prng, pt 344 drivers/gpu/drm/i915/selftests/scatterlist.c if (i915_sg_trim(&pt.st)) { pt 345 drivers/gpu/drm/i915/selftests/scatterlist.c if (pt.st.orig_nents != prime || pt 346 drivers/gpu/drm/i915/selftests/scatterlist.c pt.st.nents != prime) { pt 348 drivers/gpu/drm/i915/selftests/scatterlist.c pt.st.nents, pt.st.orig_nents, prime); pt 353 drivers/gpu/drm/i915/selftests/scatterlist.c err = expect_pfn_sgtable(&pt, 
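
[Editor's note] The i915 selftest hits around this point (alloc_table, expect_pfn_sg, expect_pfn_sgtable) all drive the generic kernel scatterlist API. For orientation, a minimal sketch of that build-then-walk pattern follows; it is not the selftest code itself, and demo_build_and_walk() is a hypothetical name.

#include <linux/mm.h>
#include <linux/scatterlist.h>

static int demo_build_and_walk(struct page **pages, unsigned int npages)
{
	struct sg_table st;
	struct scatterlist *sg;
	unsigned int i;

	/* One entry per page; sg_alloc_table() can fail under memory pressure. */
	if (sg_alloc_table(&st, npages, GFP_KERNEL))
		return -ENOMEM;

	/* Point each entry at one page. */
	for_each_sg(st.sgl, sg, st.nents, i)
		sg_set_page(sg, pages[i], PAGE_SIZE, 0);

	/* Walk it back, the way expect_pfn_sg() above walks pt->st. */
	for_each_sg(st.sgl, sg, st.nents, i)
		pr_debug("entry %u -> pfn %lu\n", i, page_to_pfn(sg_page(sg)));

	sg_free_table(&st);
	return 0;
}
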
pt 359 drivers/gpu/drm/i915/selftests/scatterlist.c sg_free_table(&pt.st); pt 258 drivers/gpu/drm/lima/lima_vm.c u32 *pd, *pt; pt 268 drivers/gpu/drm/lima/lima_vm.c pt = vm->bts[i].cpu; pt 275 drivers/gpu/drm/lima/lima_vm.c u32 pte = *pt++; pt 53 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c device->mmu->vmm->pd->pt[0]->memory; pt 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c struct nvkm_mmu_pt *pt; pt 42 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_mmu_ptp_put(struct nvkm_mmu *mmu, bool force, struct nvkm_mmu_pt *pt) pt 44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c const int slot = pt->base >> pt->ptp->shift; pt 45 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c struct nvkm_mmu_ptp *ptp = pt->ptp; pt 56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_mmu_ptc_put(mmu, force, &ptp->pt); pt 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c kfree(pt); pt 67 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c struct nvkm_mmu_pt *pt; pt 71 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (!(pt = kzalloc(sizeof(*pt), GFP_KERNEL))) pt 78 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c kfree(pt); pt 82 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c ptp->pt = nvkm_mmu_ptc_get(mmu, 0x1000, 0x1000, false); pt 83 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (!ptp->pt) { pt 85 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c kfree(pt); pt 90 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c slot = nvkm_memory_size(ptp->pt->memory) >> ptp->shift; pt 95 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->ptp = ptp; pt 96 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->sub = true; pt 106 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->memory = pt->ptp->pt->memory; pt 107 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->base = slot << ptp->shift; pt 108 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->addr = pt->ptp->pt->addr + pt->base; pt 109 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c return pt; pt 143 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c struct nvkm_mmu_pt *pt = *ppt; pt 144 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (pt) { pt 146 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (pt->sub) { pt 148 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_mmu_ptp_put(mmu, force, pt); pt 155 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (pt->ptc->refs < 8 /* Heuristic. 
*/ && !force) { pt 156 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c list_add_tail(&pt->head, &pt->ptc->item); pt 157 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->ptc->refs++; pt 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_memory_unref(&pt->memory); pt 160 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c kfree(pt); pt 170 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c struct nvkm_mmu_pt *pt; pt 176 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt = nvkm_mmu_ptp_get(mmu, align, zero); pt 178 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c return pt; pt 190 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt = list_first_entry_or_null(&ptc->item, typeof(*pt), head); pt 191 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (pt) { pt 193 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_fo64(pt->memory, 0, 0, size >> 3); pt 194 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c list_del(&pt->head); pt 197 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c return pt; pt 202 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c if (!(pt = kmalloc(sizeof(*pt), GFP_KERNEL))) pt 204 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->ptc = ptc; pt 205 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->sub = false; pt 208 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c size, align, zero, &pt->memory); pt 210 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c kfree(pt); pt 214 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->base = 0; pt 215 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c pt->addr = nvkm_memory_addr(pt->memory); pt 216 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c return pt; pt 224 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c struct nvkm_mmu_pt *pt, *tt; pt 225 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c list_for_each_entry_safe(pt, tt, &ptc->item, head) { pt 226 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c nvkm_memory_unref(&pt->memory); pt 227 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c list_del(&pt->head); pt 228 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c kfree(pt); pt 35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv41.c nvkm_wr32(device, 0x100800, 0x00000002 | mmu->vmm->pd->pt[0]->addr); pt 35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c struct nvkm_memory *pt = mmu->vmm->pd->pt[0]->memory; pt 43 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c addr -= ((nvkm_memory_addr(pt) >> 19) + 1) << 19; pt 47 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/nv44.c nvkm_wr32(device, 0x100804, (nvkm_memory_size(pt) / 4) * 4096); pt 79 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm_pt *pt[NVKM_VMM_LEVELS_MAX]; pt 142 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm_pt *pgd = it->pt[it->lvl + 1]; pt 143 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm_pt *pgt = it->pt[it->lvl]; pt 144 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_mmu_pt *pt = pgt->pt[type]; pt 154 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pgt->pt[type] = NULL; pt 157 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (pgd->pt[0]) { pt 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c func->sparse(vmm, pgd->pt[0], pdei, 1); pt 162 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c func->unmap(vmm, pgd->pt[0], pdei, 1); pt 190 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_mmu_ptc_put(vmm->mmu, vmm->bootstrapped, &pt); pt 244 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); pt 252 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->invalid(vmm, pgt->pt[0], pteb, ptes); pt 262 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c 
struct nvkm_vmm_pt *pgt = it->pt[0]; pt 267 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c dma = desc->func->pfn_clear(it->vmm, pgt->pt[type], ptei, ptes); pt 272 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->pfn_unmap(it->vmm, pgt->pt[type], ptei, ptes); pt 348 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->sparse(vmm, pgt->pt[1], spti, sptc); pt 351 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->unmap(vmm, pgt->pt[0], pteb, ptes); pt 358 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pair->func->unmap(vmm, pgt->pt[0], pteb, ptes); pt 368 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm_pt *pgt = it->pt[0]; pt 396 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm_pt *pt = it->pt[0]; pt 398 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c memset(&pt->pde[ptei], 0x00, sizeof(pt->pde[0]) * ptes); pt 401 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c memset(&pt->pte[ptei], 0x00, sizeof(pt->pte[0]) * ptes); pt 408 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_vmm_sparse_ptes(it->desc, it->pt[0], ptei, ptes); pt 421 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_mmu_pt *pt; pt 428 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pgt->pt[type] = nvkm_mmu_ptc_get(mmu, size, desc->align, zero); pt 429 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!pgt->pt[type]) { pt 438 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c pt = pgt->pt[type]; pt 457 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->sparse(vmm, pt, pteb, ptes); pt 459 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->invalid(vmm, pt, pteb, ptes); pt 462 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->unmap(vmm, pt, pteb, ptes); pt 470 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->sparse(vmm, pt, 0, pten); pt 472 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c desc->func->invalid(vmm, pt, 0, pten); pt 523 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c it.pt[it.max] = vmm->pd; pt 532 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_vmm_pt *pgt = it.pt[it.lvl]; pt 548 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c it.pt[it.lvl - 1] = pgt = pgd->pde[pdei]; pt 564 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c struct nvkm_mmu_pt *pt = pgt->pt[type]; pt 567 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c MAP_PTES(vmm, pt, ptei, ptes, map); pt 569 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c CLR_PTES(vmm, pt, ptei, ptes); pt 1013 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_mmu_ptc_put(vmm->mmu, true, &vmm->pd->pt[0]); pt 1083 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c vmm->pd->pt[0] = nvkm_mmu_ptc_get(mmu, size, desc->align, true); pt 1084 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c if (!vmm->pd->pt[0]) pt 1810 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.c nvkm_memory_boot(it->pt[0]->pt[type]->memory, it->vmm); pt 14 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmm.h struct nvkm_mmu_pt *pt[2]; pt 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 44 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 51 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); pt 65 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_dma(struct nvkm_vmm *vmm, 
struct nvkm_mmu_pt *pt, pt 70 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c nvkm_kmap(pt->memory); pt 73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 76 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c nvkm_done(pt->memory); pt 80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); pt 84 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); pt 92 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c VMM_FO064(pt, vmm, ptei * 8, 0ULL, ptes); pt 109 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pd = pgd->pt[0]; pt 110 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pt; pt 113 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c if ((pt = pgt->pt[0])) { pt 114 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c switch (nvkm_memory_target(pt->memory)) { pt 124 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c data |= pt->addr >> 8; pt 127 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c if ((pt = pgt->pt[1])) { pt 128 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c switch (nvkm_memory_target(pt->memory)) { pt 138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c data |= pt->addr << 24; pt 192 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pd = vmm->pd->pt[0]; pt 213 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c addr |= (vmm->pd->pt[0]->addr >> 12) << 4; pt 343 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c struct nvkm_mmu_pt *pd = vmm->pd->pt[0]; pt 26 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk104.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); pt 29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); pt 54 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c struct nvkm_mmu_pt *pt, u32 pdei, u32 pdes) pt 57 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgm200.c VMM_FO064(pt, vmm, pdei * 8, BIT_ULL(35) /* VOL_BIG. 
*/, pdes); pt 35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 40 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_kmap(pt->memory); pt 42 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); pt 43 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); pt 51 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_done(pt->memory); pt 56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 59 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_kmap(pt->memory); pt 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); pt 62 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); pt 65 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0)); pt 70 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_done(pt->memory); pt 75 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_pfn(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 81 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_kmap(pt->memory); pt 102 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 105 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_done(pt->memory); pt 109 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 117 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 123 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 126 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte); pt 130 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 135 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_kmap(pt->memory); pt 138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 141 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c nvkm_done(pt->memory); pt 145 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte); pt 149 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 152 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte); pt 157 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 160 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(3) /* VOL. */, ptes); pt 177 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 180 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(5) /* PRIV. 
*/, ptes); pt 192 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 200 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_WO128(pt, vmm, ptei++ * 0x10, data, 0ULL); pt 206 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pd0_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 209 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, gp100_vmm_pd0_pte); pt 213 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c gp100_vmm_pde(struct nvkm_mmu_pt *pt, u64 *data) pt 215 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c switch (nvkm_memory_target(pt->memory)) { pt 225 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c *data |= pt->addr >> 4; pt 233 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pd = pgd->pt[0]; pt 236 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c if (pgt->pt[0] && !gp100_vmm_pde(pgt->pt[0], &data[0])) pt 238 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c if (pgt->pt[1] && !gp100_vmm_pde(pgt->pt[1], &data[1])) pt 248 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pt, u32 pdei, u32 pdes) pt 251 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO128(pt, vmm, pdei * 0x10, BIT_ULL(3) /* VOL_BIG. */, 0ULL, pdes); pt 256 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pt, u32 pdei, u32 pdes) pt 258 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c VMM_FO128(pt, vmm, pdei * 0x10, 0ULL, 0ULL, pdes); pt 273 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c struct nvkm_mmu_pt *pd = pgd->pt[0]; pt 276 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c if (!gp100_vmm_pde(pgt->pt[0], &data)) pt 28 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_WO032(pt, vmm, 8 + ptei++ * 4, data); pt 39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 42 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); pt 46 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nv04_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 50 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nvkm_kmap(pt->memory); pt 52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_WO032(pt, vmm, 8 + (ptei++ * 4), *map->dma++ | 0x00000003); pt 53 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c nvkm_done(pt->memory); pt 55 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); pt 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 63 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); pt 135 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv04.c mem = vmm->pd->pt[0]->memory; pt 27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_WO032(pt, vmm, ptei++ * 4, data); pt 38 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 41 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); pt 45 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nv41_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, 
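
[Editor's note] The nouveau MMU entries in this stretch repeat one shape: nvkm_kmap() the page-table backing memory, write a run of PTEs through a VMM_WO* helper, then nvkm_done(). A condensed sketch of that shape, assembled from the nv41/nv04 fragments above; demo_vmm_pgt_dma() is a hypothetical name, and the 0x00000003 valid/rw bits come from the nv04 lines, not from guesswork.

static void demo_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt,
			     u32 ptei, u32 ptes, struct nvkm_vmm_map *map)
{
	nvkm_kmap(pt->memory);		/* make the page table CPU-visible */
	while (ptes--) {
		const u32 data = *map->dma++ | 0x00000003;
		VMM_WO032(pt, vmm, ptei++ * 4, data);	/* one 32-bit PTE */
	}
	nvkm_done(pt->memory);		/* finish/flush the mapping */
}
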
pt 49 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nvkm_kmap(pt->memory); pt 52 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_WO032(pt, vmm, ptei++ * 4, data); pt 54 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c nvkm_done(pt->memory); pt 56 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); pt 62 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 64 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv41.c VMM_FO032(pt, vmm, ptei * 4, 0, ptes); pt 27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 33 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c tmp[0] = nvkm_ro32(pt->memory, pteo + 0x0); pt 34 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c tmp[1] = nvkm_ro32(pt->memory, pteo + 0x4); pt 35 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c tmp[2] = nvkm_ro32(pt->memory, pteo + 0x8); pt 36 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c tmp[3] = nvkm_ro32(pt->memory, pteo + 0xc); pt 66 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); pt 67 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); pt 68 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); pt 69 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000); pt 73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 82 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, pten); pt 90 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27); pt 91 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22); pt 92 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[2] >> 10 | tmp[3] << 17); pt 93 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[3] >> 15 | 0x40000000); pt 100 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes); pt 105 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 108 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv44_vmm_pgt_pte); pt 112 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 116 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nvkm_kmap(pt->memory); pt 119 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, pten); pt 129 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[0] >> 0 | tmp[1] << 27); pt 130 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[1] >> 5 | tmp[2] << 22); pt 131 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[2] >> 10 | tmp[3] << 17); pt 132 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, tmp[3] >> 15 | 0x40000000); pt 137 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, map->dma, ptei, ptes); pt 140 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nvkm_done(pt->memory); pt 142 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_MAP_ITER_DMA(vmm, pt, 
ptei, ptes, map, nv44_vmm_pgt_pte); pt 148 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 150 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nvkm_kmap(pt->memory); pt 153 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, NULL, ptei, pten); pt 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000); pt 160 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000); pt 161 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000); pt 162 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c VMM_WO032(pt, vmm, ptei++ * 4, 0x00000000); pt 167 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nv44_vmm_pgt_fill(vmm, pt, NULL, ptei, ptes); pt 168 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv44.c nvkm_done(pt->memory); pt 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 53 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 58 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 61 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); pt 65 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 70 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nvkm_kmap(pt->memory); pt 73 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_WO064(pt, vmm, ptei++ * 8, data); pt 76 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nvkm_done(pt->memory); pt 80 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); pt 84 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c nv50_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, pt 87 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_MAP_ITER_MEM(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); pt 92 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) pt 94 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c VMM_FO064(pt, vmm, ptei * 8, 0ULL, ptes); pt 108 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c struct nvkm_mmu_pt *pt; pt 110 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c if (pgt && (pt = pgt->pt[0])) { pt 114 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c switch (nvkm_memory_size(pt->memory)) { pt 129 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c switch (nvkm_memory_target(pt->memory)) { pt 138 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmnv50.c data |= pt->addr; pt 39 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmtu102.c nvkm_wr32(device, 0xb830a0, vmm->pd->pt[0]->addr >> 8); pt 298 drivers/gpu/drm/omapdrm/tcm.h #define tcm_is_in(pt, area) __tcm_is_in(&(pt), &(area)) pt 324 drivers/gpu/drm/panel/panel-arm-versatile.c const struct versatile_panel_type *pt; pt 326 drivers/gpu/drm/panel/panel-arm-versatile.c pt = &versatile_panels[i]; pt 327 drivers/gpu/drm/panel/panel-arm-versatile.c if (pt->magic == val) { pt 328 drivers/gpu/drm/panel/panel-arm-versatile.c vpanel->panel_type = pt; pt 1259 drivers/gpu/drm/radeon/r600_dpm.c ATOM_PowerTune_Table *pt; pt 1272 drivers/gpu/drm/radeon/r600_dpm.c pt = &ppt->power_tune_table; pt 1278 drivers/gpu/drm/radeon/r600_dpm.c pt = &ppt->power_tune_table; pt 1280 drivers/gpu/drm/radeon/r600_dpm.c rdev->pm.dpm.dyn_state.cac_tdp_table->tdp = 
le16_to_cpu(pt->usTDP); pt 1282 drivers/gpu/drm/radeon/r600_dpm.c le16_to_cpu(pt->usConfigurableTDP); pt 1283 drivers/gpu/drm/radeon/r600_dpm.c rdev->pm.dpm.dyn_state.cac_tdp_table->tdc = le16_to_cpu(pt->usTDC); pt 1285 drivers/gpu/drm/radeon/r600_dpm.c le16_to_cpu(pt->usBatteryPowerLimit); pt 1287 drivers/gpu/drm/radeon/r600_dpm.c le16_to_cpu(pt->usSmallPowerLimit); pt 1289 drivers/gpu/drm/radeon/r600_dpm.c le16_to_cpu(pt->usLowCACLeakage); pt 1291 drivers/gpu/drm/radeon/r600_dpm.c le16_to_cpu(pt->usHighCACLeakage); pt 542 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_bo *pt; pt 553 drivers/gpu/drm/radeon/radeon_vm.c NULL, NULL, &pt); pt 557 drivers/gpu/drm/radeon/radeon_vm.c r = radeon_vm_clear_bo(rdev, pt); pt 559 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_unref(&pt); pt 568 drivers/gpu/drm/radeon/radeon_vm.c radeon_bo_unref(&pt); pt 574 drivers/gpu/drm/radeon/radeon_vm.c vm->page_tables[pt_idx].bo = pt; pt 670 drivers/gpu/drm/radeon/radeon_vm.c uint64_t pde, pt; pt 675 drivers/gpu/drm/radeon/radeon_vm.c pt = radeon_bo_gpu_offset(bo); pt 676 drivers/gpu/drm/radeon/radeon_vm.c if (vm->page_tables[pt_idx].addr == pt) pt 678 drivers/gpu/drm/radeon/radeon_vm.c vm->page_tables[pt_idx].addr = pt; pt 682 drivers/gpu/drm/radeon/radeon_vm.c ((last_pt + incr * count) != pt)) { pt 692 drivers/gpu/drm/radeon/radeon_vm.c last_pt = pt; pt 828 drivers/gpu/drm/radeon/radeon_vm.c struct radeon_bo *pt = vm->page_tables[pt_idx].bo; pt 833 drivers/gpu/drm/radeon/radeon_vm.c radeon_sync_resv(rdev, &ib->sync, pt->tbo.base.resv, true); pt 834 drivers/gpu/drm/radeon/radeon_vm.c r = dma_resv_reserve_shared(pt->tbo.base.resv, 1); pt 843 drivers/gpu/drm/radeon/radeon_vm.c pte = radeon_bo_gpu_offset(pt); pt 60 drivers/gpu/drm/v3d/v3d_drv.h volatile u32 *pt; pt 850 drivers/gpu/drm/v3d/v3d_gem.c v3d->pt = dma_alloc_wc(v3d->dev, pt_size, pt 853 drivers/gpu/drm/v3d/v3d_gem.c if (!v3d->pt) { pt 867 drivers/gpu/drm/v3d/v3d_gem.c dma_free_coherent(v3d->dev, 4096 * 1024, (void *)v3d->pt, pt 889 drivers/gpu/drm/v3d/v3d_gem.c dma_free_coherent(v3d->dev, 4096 * 1024, (void *)v3d->pt, v3d->pt_paddr); pt 105 drivers/gpu/drm/v3d/v3d_mmu.c v3d->pt[page++] = pte + i; pt 122 drivers/gpu/drm/v3d/v3d_mmu.c v3d->pt[page] = 0; pt 77 drivers/infiniband/hw/hfi1/file_ops.c static __poll_t hfi1_poll(struct file *fp, struct poll_table_struct *pt); pt 105 drivers/infiniband/hw/hfi1/file_ops.c static __poll_t poll_urgent(struct file *fp, struct poll_table_struct *pt); pt 106 drivers/infiniband/hw/hfi1/file_ops.c static __poll_t poll_next(struct file *fp, struct poll_table_struct *pt); pt 619 drivers/infiniband/hw/hfi1/file_ops.c static __poll_t hfi1_poll(struct file *fp, struct poll_table_struct *pt) pt 628 drivers/infiniband/hw/hfi1/file_ops.c pollflag = poll_urgent(fp, pt); pt 630 drivers/infiniband/hw/hfi1/file_ops.c pollflag = poll_next(fp, pt); pt 1445 drivers/infiniband/hw/hfi1/file_ops.c struct poll_table_struct *pt) pt 1452 drivers/infiniband/hw/hfi1/file_ops.c poll_wait(fp, &uctxt->wait, pt); pt 1468 drivers/infiniband/hw/hfi1/file_ops.c struct poll_table_struct *pt) pt 1475 drivers/infiniband/hw/hfi1/file_ops.c poll_wait(fp, &uctxt->wait, pt); pt 1083 drivers/infiniband/hw/qib/qib_file_ops.c struct poll_table_struct *pt) pt 1088 drivers/infiniband/hw/qib/qib_file_ops.c poll_wait(fp, &rcd->wait, pt); pt 1105 drivers/infiniband/hw/qib/qib_file_ops.c struct poll_table_struct *pt) pt 1110 drivers/infiniband/hw/qib/qib_file_ops.c poll_wait(fp, &rcd->wait, pt); pt 1124 drivers/infiniband/hw/qib/qib_file_ops.c static __poll_t 
qib_poll(struct file *fp, struct poll_table_struct *pt) pt 1133 drivers/infiniband/hw/qib/qib_file_ops.c pollflag = qib_poll_urgent(rcd, fp, pt); pt 1135 drivers/infiniband/hw/qib/qib_file_ops.c pollflag = qib_poll_next(rcd, fp, pt); pt 1373 drivers/iommu/amd_iommu.c static struct page *free_pt_page(unsigned long pt, struct page *freelist) pt 1375 drivers/iommu/amd_iommu.c struct page *p = virt_to_page((void *)pt); pt 1386 drivers/iommu/amd_iommu.c u64 *pt; \ pt 1389 drivers/iommu/amd_iommu.c pt = (u64 *)__pt; \ pt 1393 drivers/iommu/amd_iommu.c if (!IOMMU_PTE_PRESENT(pt[i])) \ pt 1397 drivers/iommu/amd_iommu.c if (PM_PTE_LEVEL(pt[i]) == 0 || \ pt 1398 drivers/iommu/amd_iommu.c PM_PTE_LEVEL(pt[i]) == 7) \ pt 1401 drivers/iommu/amd_iommu.c p = (unsigned long)IOMMU_PTE_PAGE(pt[i]); \ pt 1405 drivers/iommu/amd_iommu.c return free_pt_page((unsigned long)pt, freelist); \ pt 1634 drivers/iommu/amd_iommu.c unsigned long pt; pt 1645 drivers/iommu/amd_iommu.c pt = (unsigned long)IOMMU_PTE_PAGE(pteval); pt 1648 drivers/iommu/amd_iommu.c return free_sub_pt(pt, mode, freelist); pt 214 drivers/iommu/iommu.c bool pt; pt 217 drivers/iommu/iommu.c ret = kstrtobool(str, &pt); pt 221 drivers/iommu/iommu.c if (pt) pt 543 drivers/iommu/tegra-smmu.c u32 *pt = page_address(pt_page); pt 545 drivers/iommu/tegra-smmu.c return pt + iova_pt_index(iova); pt 2028 drivers/irqchip/irq-gic-v3-its.c static void its_free_pending_table(struct page *pt) pt 2030 drivers/irqchip/irq-gic-v3-its.c free_pages((unsigned long)page_address(pt), get_order(LPI_PENDBASE_SZ)); pt 3851 drivers/isdn/hardware/mISDN/hfcmulti.c int i, pt; pt 3857 drivers/isdn/hardware/mISDN/hfcmulti.c pt = hc->chan[i].port; pt 3860 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].slot_tx = -1; pt 3861 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].slot_rx = -1; pt 3862 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].conf = -1; pt 3863 drivers/isdn/hardware/mISDN/hfcmulti.c if (hc->dnum[pt]) { pt 3869 drivers/isdn/hardware/mISDN/hfcmulti.c if (!((1 << i) & hc->bmask[pt])) /* skip unused chan */ pt 3877 drivers/isdn/hardware/mISDN/hfcmulti.c if (hc->ctype == HFC_TYPE_E1 && pt == 0) { pt 3982 drivers/isdn/hardware/mISDN/hfcmulti.c HFC_outb(hc, R_ST_SEL, pt); pt 3989 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, pt); pt 3993 drivers/isdn/hardware/mISDN/hfcmulti.c hc->hw.a_st_ctrl0[pt] = V_ST_MD; pt 3998 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, pt); pt 4002 drivers/isdn/hardware/mISDN/hfcmulti.c hc->hw.a_st_ctrl0[pt] = 0; pt 4005 drivers/isdn/hardware/mISDN/hfcmulti.c hc->hw.a_st_ctrl0[pt] |= V_TX_LI; pt 4007 drivers/isdn/hardware/mISDN/hfcmulti.c hc->hw.a_st_ctrl0[pt] |= 0x40 /* V_ST_PU_CTRL */; pt 4012 drivers/isdn/hardware/mISDN/hfcmulti.c HFC_outb(hc, A_ST_CTRL0, hc->hw.a_st_ctrl0[pt]); pt 4025 drivers/isdn/hardware/mISDN/hfcmulti.c hc->hw.r_sci_msk |= 1 << pt; pt 4515 drivers/isdn/hardware/mISDN/hfcmulti.c int pt, ci, i = 0; pt 4520 drivers/isdn/hardware/mISDN/hfcmulti.c pt = hc->chan[ci].port; pt 4524 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, pt + 1); pt 4526 drivers/isdn/hardware/mISDN/hfcmulti.c if (pt >= hc->ports) { pt 4528 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, pt + 1); pt 4534 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, pt + 1); pt 4541 drivers/isdn/hardware/mISDN/hfcmulti.c if (hc->created[pt]) { pt 4542 drivers/isdn/hardware/mISDN/hfcmulti.c hc->created[pt] = 0; pt 4561 drivers/isdn/hardware/mISDN/hfcmulti.c if (!((1 << i) & hc->bmask[pt])) /* skip unused chan */ pt 4619 
drivers/isdn/hardware/mISDN/hfcmulti.c pt+1, ci); pt 4796 drivers/isdn/hardware/mISDN/hfcmulti.c init_e1_port(struct hfc_multi *hc, struct hm_map *m, int pt) pt 4815 drivers/isdn/hardware/mISDN/hfcmulti.c dch->slot = hc->dnum[pt]; pt 4816 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].dch = dch; pt 4817 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].port = pt; pt 4818 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].nt_timer = -1; pt 4820 drivers/isdn/hardware/mISDN/hfcmulti.c if (!((1 << ch) & hc->bmask[pt])) /* skip unused channel */ pt 4847 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[ch].port = pt; pt 4852 drivers/isdn/hardware/mISDN/hfcmulti.c if (pt == 0) pt 4856 drivers/isdn/hardware/mISDN/hfcmulti.c HFC_cnt + 1, pt+1); pt 4862 drivers/isdn/hardware/mISDN/hfcmulti.c hc->created[pt] = 1; pt 4870 drivers/isdn/hardware/mISDN/hfcmulti.c init_multi_port(struct hfc_multi *hc, int pt) pt 4889 drivers/isdn/hardware/mISDN/hfcmulti.c i = pt << 2; pt 4892 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[i + 2].port = pt; pt 4920 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[i + ch].port = pt; pt 4929 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, HFC_cnt + 1, pt + 1); pt 4934 drivers/isdn/hardware/mISDN/hfcmulti.c pt + 1, HFC_cnt + 1); pt 4942 drivers/isdn/hardware/mISDN/hfcmulti.c pt + 1, HFC_cnt + 1, hc->masterclk + 1); pt 4946 drivers/isdn/hardware/mISDN/hfcmulti.c hc->masterclk = pt; pt 4954 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, HFC_cnt + 1, pt + 1); pt 4964 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, HFC_cnt + 1, pt + 1); pt 4970 drivers/isdn/hardware/mISDN/hfcmulti.c HFC_cnt + 1, pt + 1); pt 4974 drivers/isdn/hardware/mISDN/hfcmulti.c hc->ctype, HFC_cnt + 1, pt + 1); pt 4979 drivers/isdn/hardware/mISDN/hfcmulti.c hc->created[pt] = 1; pt 4991 drivers/isdn/hardware/mISDN/hfcmulti.c int pt; pt 5032 drivers/isdn/hardware/mISDN/hfcmulti.c pt = 0; pt 5037 drivers/isdn/hardware/mISDN/hfcmulti.c hc->dnum[pt] = ch; pt 5038 drivers/isdn/hardware/mISDN/hfcmulti.c hc->bmask[pt] = bmask[bmask_cnt++]; pt 5039 drivers/isdn/hardware/mISDN/hfcmulti.c if ((maskcheck & hc->bmask[pt]) pt 5040 drivers/isdn/hardware/mISDN/hfcmulti.c || (dmask[E1_cnt] & hc->bmask[pt])) { pt 5043 drivers/isdn/hardware/mISDN/hfcmulti.c E1_cnt + 1, pt); pt 5047 drivers/isdn/hardware/mISDN/hfcmulti.c maskcheck |= hc->bmask[pt]; pt 5050 drivers/isdn/hardware/mISDN/hfcmulti.c E1_cnt + 1, ch, hc->bmask[pt]); pt 5051 drivers/isdn/hardware/mISDN/hfcmulti.c pt++; pt 5053 drivers/isdn/hardware/mISDN/hfcmulti.c hc->ports = pt; pt 5136 drivers/isdn/hardware/mISDN/hfcmulti.c for (pt = 0; pt < hc->ports; pt++) { pt 5144 drivers/isdn/hardware/mISDN/hfcmulti.c ret_err = init_e1_port(hc, m, pt); pt 5146 drivers/isdn/hardware/mISDN/hfcmulti.c ret_err = init_multi_port(hc, pt); pt 5151 drivers/isdn/hardware/mISDN/hfcmulti.c __func__, HFC_cnt + 1, pt + 1, ret_err); pt 5154 drivers/isdn/hardware/mISDN/hfcmulti.c while (pt) { /* release already registered ports */ pt 5155 drivers/isdn/hardware/mISDN/hfcmulti.c pt--; pt 5158 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[hc->dnum[pt]].dch); pt 5161 drivers/isdn/hardware/mISDN/hfcmulti.c hc->chan[(pt << 2) + 2].dch); pt 2517 drivers/md/dm-thin.c static bool passdown_enabled(struct pool_c *pt) pt 2519 drivers/md/dm-thin.c return pt->adjusted_pf.discard_passdown; pt 2524 drivers/md/dm-thin.c struct pool_c *pt = pool->ti->private; pt 2526 drivers/md/dm-thin.c if (passdown_enabled(pt)) { pt 2538 drivers/md/dm-thin.c struct pool_c *pt = 
pool->ti->private; pt 2613 drivers/md/dm-thin.c pool->pf.error_if_no_space = pt->requested_pf.error_if_no_space; pt 2628 drivers/md/dm-thin.c pt->adjusted_pf.mode = new_mode; pt 2811 drivers/md/dm-thin.c struct pool_c *pt = container_of(cb, struct pool_c, callbacks); pt 2814 drivers/md/dm-thin.c if (get_pool_mode(pt->pool) == PM_OUT_OF_DATA_SPACE) pt 2817 drivers/md/dm-thin.c q = bdev_get_queue(pt->data_dev->bdev); pt 2839 drivers/md/dm-thin.c static bool data_dev_supports_discard(struct pool_c *pt) pt 2841 drivers/md/dm-thin.c struct request_queue *q = bdev_get_queue(pt->data_dev->bdev); pt 2855 drivers/md/dm-thin.c static void disable_passdown_if_not_supported(struct pool_c *pt) pt 2857 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 2858 drivers/md/dm-thin.c struct block_device *data_bdev = pt->data_dev->bdev; pt 2863 drivers/md/dm-thin.c if (!pt->adjusted_pf.discard_passdown) pt 2866 drivers/md/dm-thin.c if (!data_dev_supports_discard(pt)) pt 2874 drivers/md/dm-thin.c pt->adjusted_pf.discard_passdown = false; pt 2880 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 2886 drivers/md/dm-thin.c enum pool_mode new_mode = pt->adjusted_pf.mode; pt 2893 drivers/md/dm-thin.c pt->adjusted_pf.mode = old_mode; pt 2896 drivers/md/dm-thin.c pool->pf = pt->adjusted_pf; pt 2897 drivers/md/dm-thin.c pool->low_water_blocks = pt->low_water_blocks; pt 3139 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3143 drivers/md/dm-thin.c unbind_control_target(pt->pool, ti); pt 3144 drivers/md/dm-thin.c __pool_dec(pt->pool); pt 3145 drivers/md/dm-thin.c dm_put_device(ti, pt->metadata_dev); pt 3146 drivers/md/dm-thin.c dm_put_device(ti, pt->data_dev); pt 3147 drivers/md/dm-thin.c bio_uninit(&pt->flush_bio); pt 3148 drivers/md/dm-thin.c kfree(pt); pt 3226 drivers/md/dm-thin.c struct pool_c *pt = context; pt 3227 drivers/md/dm-thin.c struct bio *flush_bio = &pt->flush_bio; pt 3230 drivers/md/dm-thin.c bio_set_dev(flush_bio, pt->data_dev->bdev); pt 3276 drivers/md/dm-thin.c static dm_block_t calc_metadata_threshold(struct pool_c *pt) pt 3283 drivers/md/dm-thin.c dm_block_t quarter = get_metadata_dev_size_in_blocks(pt->metadata_dev->bdev) / 4; pt 3303 drivers/md/dm-thin.c struct pool_c *pt; pt 3373 drivers/md/dm-thin.c pt = kzalloc(sizeof(*pt), GFP_KERNEL); pt 3374 drivers/md/dm-thin.c if (!pt) { pt 3398 drivers/md/dm-thin.c pt->pool = pool; pt 3399 drivers/md/dm-thin.c pt->ti = ti; pt 3400 drivers/md/dm-thin.c pt->metadata_dev = metadata_dev; pt 3401 drivers/md/dm-thin.c pt->data_dev = data_dev; pt 3402 drivers/md/dm-thin.c pt->low_water_blocks = low_water_blocks; pt 3403 drivers/md/dm-thin.c pt->adjusted_pf = pt->requested_pf = pf; pt 3404 drivers/md/dm-thin.c bio_init(&pt->flush_bio, NULL, 0); pt 3422 drivers/md/dm-thin.c ti->private = pt; pt 3424 drivers/md/dm-thin.c r = dm_pool_register_metadata_threshold(pt->pool->pmd, pt 3425 drivers/md/dm-thin.c calc_metadata_threshold(pt), pt 3431 drivers/md/dm-thin.c pt->callbacks.congested_fn = pool_is_congested; pt 3432 drivers/md/dm-thin.c dm_table_add_target_callbacks(ti->table, &pt->callbacks); pt 3441 drivers/md/dm-thin.c kfree(pt); pt 3455 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3456 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3463 drivers/md/dm-thin.c bio_set_dev(bio, pt->data_dev->bdev); pt 3473 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3474 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3521 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3522 drivers/md/dm-thin.c struct pool *pool = 
pt->pool; pt 3584 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3585 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3595 drivers/md/dm-thin.c metadata_pre_commit_callback, pt); pt 3637 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3638 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3658 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3659 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3671 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3672 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3684 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3685 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3861 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3862 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 3941 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 3942 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 4034 drivers/md/dm-thin.c DMEMIT("%llu ", (unsigned long long)calc_metadata_threshold(pt)); pt 4040 drivers/md/dm-thin.c format_dev_t(buf, pt->metadata_dev->bdev->bd_dev), pt 4041 drivers/md/dm-thin.c format_dev_t(buf2, pt->data_dev->bdev->bd_dev), pt 4043 drivers/md/dm-thin.c (unsigned long long)pt->low_water_blocks); pt 4044 drivers/md/dm-thin.c emit_flags(&pt->requested_pf, result, sz, maxlen); pt 4056 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 4058 drivers/md/dm-thin.c return fn(ti, pt->data_dev, 0, ti->len, data); pt 4063 drivers/md/dm-thin.c struct pool_c *pt = ti->private; pt 4064 drivers/md/dm-thin.c struct pool *pool = pt->pool; pt 4102 drivers/md/dm-thin.c if (!pt->adjusted_pf.discard_enabled) { pt 4113 drivers/md/dm-thin.c disable_passdown_if_not_supported(pt); pt 164 drivers/media/common/saa7146/saa7146_core.c void *saa7146_vmalloc_build_pgtable(struct pci_dev *pci, long length, struct saa7146_pgtable *pt) pt 173 drivers/media/common/saa7146/saa7146_core.c if (!(pt->slist = vmalloc_to_sg(mem, pages))) pt 176 drivers/media/common/saa7146/saa7146_core.c if (saa7146_pgtable_alloc(pci, pt)) pt 179 drivers/media/common/saa7146/saa7146_core.c pt->nents = pages; pt 180 drivers/media/common/saa7146/saa7146_core.c slen = pci_map_sg(pci,pt->slist,pt->nents,PCI_DMA_FROMDEVICE); pt 184 drivers/media/common/saa7146/saa7146_core.c if (0 != saa7146_pgtable_build_single(pci, pt, pt->slist, slen)) pt 190 drivers/media/common/saa7146/saa7146_core.c pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); pt 192 drivers/media/common/saa7146/saa7146_core.c saa7146_pgtable_free(pci, pt); pt 194 drivers/media/common/saa7146/saa7146_core.c kfree(pt->slist); pt 195 drivers/media/common/saa7146/saa7146_core.c pt->slist = NULL; pt 202 drivers/media/common/saa7146/saa7146_core.c void saa7146_vfree_destroy_pgtable(struct pci_dev *pci, void *mem, struct saa7146_pgtable *pt) pt 204 drivers/media/common/saa7146/saa7146_core.c pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); pt 205 drivers/media/common/saa7146/saa7146_core.c saa7146_pgtable_free(pci, pt); pt 206 drivers/media/common/saa7146/saa7146_core.c kfree(pt->slist); pt 207 drivers/media/common/saa7146/saa7146_core.c pt->slist = NULL; pt 211 drivers/media/common/saa7146/saa7146_core.c void saa7146_pgtable_free(struct pci_dev *pci, struct saa7146_pgtable *pt) pt 213 drivers/media/common/saa7146/saa7146_core.c if (NULL == pt->cpu) pt 215 drivers/media/common/saa7146/saa7146_core.c pci_free_consistent(pci, pt->size, pt->cpu, pt->dma); pt 216 drivers/media/common/saa7146/saa7146_core.c pt->cpu = NULL; pt 219 
drivers/media/common/saa7146/saa7146_core.c int saa7146_pgtable_alloc(struct pci_dev *pci, struct saa7146_pgtable *pt) pt 228 drivers/media/common/saa7146/saa7146_core.c pt->size = PAGE_SIZE; pt 229 drivers/media/common/saa7146/saa7146_core.c pt->cpu = cpu; pt 230 drivers/media/common/saa7146/saa7146_core.c pt->dma = dma_addr; pt 235 drivers/media/common/saa7146/saa7146_core.c int saa7146_pgtable_build_single(struct pci_dev *pci, struct saa7146_pgtable *pt, pt 247 drivers/media/common/saa7146/saa7146_core.c pt->offset = list->offset; pt 249 drivers/media/common/saa7146/saa7146_core.c ptr = pt->cpu; pt 725 drivers/media/common/saa7146/saa7146_hlp.c vdma1.base_page = buf->pt[0].dma | ME1 | sfmt->swap; pt 728 drivers/media/common/saa7146/saa7146_hlp.c vdma1.prot_addr = buf->pt[0].offset; pt 729 drivers/media/common/saa7146/saa7146_hlp.c vdma1.base_even = buf->pt[0].offset+(vdma1.pitch/2)*height; pt 732 drivers/media/common/saa7146/saa7146_hlp.c vdma1.base_even = buf->pt[0].offset; pt 734 drivers/media/common/saa7146/saa7146_hlp.c vdma1.prot_addr = buf->pt[0].offset+(vdma1.pitch/2)*height; pt 776 drivers/media/common/saa7146/saa7146_hlp.c vdma2->prot_addr = buf->pt[1].offset; pt 777 drivers/media/common/saa7146/saa7146_hlp.c vdma2->base_even = ((vdma2->pitch/2)*height)+buf->pt[1].offset; pt 780 drivers/media/common/saa7146/saa7146_hlp.c vdma3->prot_addr = buf->pt[2].offset; pt 781 drivers/media/common/saa7146/saa7146_hlp.c vdma3->base_even = ((vdma3->pitch/2)*height)+buf->pt[2].offset; pt 784 drivers/media/common/saa7146/saa7146_hlp.c vdma3->base_even = buf->pt[2].offset; pt 786 drivers/media/common/saa7146/saa7146_hlp.c vdma3->prot_addr = (vdma3->pitch/2)*height+buf->pt[2].offset; pt 788 drivers/media/common/saa7146/saa7146_hlp.c vdma2->base_even = buf->pt[1].offset; pt 790 drivers/media/common/saa7146/saa7146_hlp.c vdma2->prot_addr = (vdma2->pitch/2)*height+buf->pt[1].offset; pt 805 drivers/media/common/saa7146/saa7146_hlp.c vdma2->prot_addr = buf->pt[2].offset; pt 806 drivers/media/common/saa7146/saa7146_hlp.c vdma2->base_even = ((vdma2->pitch/2)*height)+buf->pt[2].offset; pt 809 drivers/media/common/saa7146/saa7146_hlp.c vdma3->prot_addr = buf->pt[1].offset; pt 810 drivers/media/common/saa7146/saa7146_hlp.c vdma3->base_even = ((vdma3->pitch/2)*height)+buf->pt[1].offset; pt 814 drivers/media/common/saa7146/saa7146_hlp.c vdma3->base_even = buf->pt[2].offset; pt 816 drivers/media/common/saa7146/saa7146_hlp.c vdma3->prot_addr = (vdma3->pitch/2)*height+buf->pt[2].offset; pt 818 drivers/media/common/saa7146/saa7146_hlp.c vdma2->base_even = buf->pt[1].offset; pt 820 drivers/media/common/saa7146/saa7146_hlp.c vdma2->prot_addr = (vdma2->pitch/2)*height+buf->pt[1].offset; pt 838 drivers/media/common/saa7146/saa7146_hlp.c BUG_ON(0 == buf->pt[0].dma); pt 839 drivers/media/common/saa7146/saa7146_hlp.c BUG_ON(0 == buf->pt[1].dma); pt 840 drivers/media/common/saa7146/saa7146_hlp.c BUG_ON(0 == buf->pt[2].dma); pt 853 drivers/media/common/saa7146/saa7146_hlp.c vdma1.base_page = buf->pt[0].dma | ME1; pt 856 drivers/media/common/saa7146/saa7146_hlp.c vdma1.prot_addr = buf->pt[0].offset; pt 857 drivers/media/common/saa7146/saa7146_hlp.c vdma1.base_even = ((vdma1.pitch/2)*height)+buf->pt[0].offset; pt 860 drivers/media/common/saa7146/saa7146_hlp.c vdma1.base_even = buf->pt[0].offset; pt 862 drivers/media/common/saa7146/saa7146_hlp.c vdma1.prot_addr = (vdma1.pitch/2)*height+buf->pt[0].offset; pt 866 drivers/media/common/saa7146/saa7146_hlp.c vdma2.base_page = buf->pt[1].dma | ME1; pt 869 
drivers/media/common/saa7146/saa7146_hlp.c vdma3.base_page = buf->pt[2].dma | ME1; pt 153 drivers/media/common/saa7146/saa7146_vbi.c vdma3.base_even = buf->pt[2].offset; pt 154 drivers/media/common/saa7146/saa7146_vbi.c vdma3.base_odd = buf->pt[2].offset + 16 * vbi_pixel_to_capture; pt 155 drivers/media/common/saa7146/saa7146_vbi.c vdma3.prot_addr = buf->pt[2].offset + 16 * 2 * vbi_pixel_to_capture; pt 157 drivers/media/common/saa7146/saa7146_vbi.c vdma3.base_page = buf->pt[2].dma | ME1; pt 251 drivers/media/common/saa7146/saa7146_vbi.c saa7146_pgtable_free(dev->pci, &buf->pt[2]); pt 252 drivers/media/common/saa7146/saa7146_vbi.c saa7146_pgtable_alloc(dev->pci, &buf->pt[2]); pt 257 drivers/media/common/saa7146/saa7146_vbi.c err = saa7146_pgtable_build_single(dev->pci, &buf->pt[2], pt 207 drivers/media/common/saa7146/saa7146_video.c struct saa7146_pgtable *pt1 = &buf->pt[0]; pt 208 drivers/media/common/saa7146/saa7146_video.c struct saa7146_pgtable *pt2 = &buf->pt[1]; pt 209 drivers/media/common/saa7146/saa7146_video.c struct saa7146_pgtable *pt3 = &buf->pt[2]; pt 306 drivers/media/common/saa7146/saa7146_video.c struct saa7146_pgtable *pt = &buf->pt[0]; pt 307 drivers/media/common/saa7146/saa7146_video.c return saa7146_pgtable_build_single(pci, pt, list, length); pt 1033 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_free(dev->pci, &buf->pt[0]); pt 1034 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_free(dev->pci, &buf->pt[1]); pt 1035 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_free(dev->pci, &buf->pt[2]); pt 1094 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_alloc(dev->pci, &buf->pt[0]); pt 1095 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_alloc(dev->pci, &buf->pt[1]); pt 1096 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_alloc(dev->pci, &buf->pt[2]); pt 1098 drivers/media/common/saa7146/saa7146_video.c saa7146_pgtable_alloc(dev->pci, &buf->pt[0]); pt 105 drivers/media/pci/meye/meye.c u32 *pt; pt 123 drivers/media/pci/meye/meye.c pt = meye.mchip_ptable_toc; pt 132 drivers/media/pci/meye/meye.c pt = meye.mchip_ptable_toc; pt 134 drivers/media/pci/meye/meye.c dma = (dma_addr_t) *pt; pt 138 drivers/media/pci/meye/meye.c pt++; pt 148 drivers/media/pci/meye/meye.c *pt = (u32) dma; pt 149 drivers/media/pci/meye/meye.c pt++; pt 156 drivers/media/pci/meye/meye.c u32 *pt; pt 159 drivers/media/pci/meye/meye.c pt = meye.mchip_ptable_toc; pt 161 drivers/media/pci/meye/meye.c dma_addr_t dma = (dma_addr_t) *pt; pt 166 drivers/media/pci/meye/meye.c pt++; pt 559 drivers/media/pci/saa7134/saa7134-alsa.c (dev->dmasound.pt.dma >> 12); pt 686 drivers/media/pci/saa7134/saa7134-alsa.c saa7134_pgtable_free(dev->pci, &dev->dmasound.pt); pt 708 drivers/media/pci/saa7134/saa7134-alsa.c err = saa7134_pgtable_alloc(dev->pci, &dev->dmasound.pt); pt 714 drivers/media/pci/saa7134/saa7134-alsa.c err = saa7134_pgtable_build(dev->pci, &dev->dmasound.pt, pt 717 drivers/media/pci/saa7134/saa7134-alsa.c saa7134_pgtable_free(dev->pci, &dev->dmasound.pt); pt 753 drivers/media/pci/saa7134/saa7134-alsa.c saa7134_pgtable_free(dev->pci, &dev->dmasound.pt); pt 221 drivers/media/pci/saa7134/saa7134-core.c int saa7134_pgtable_alloc(struct pci_dev *pci, struct saa7134_pgtable *pt) pt 229 drivers/media/pci/saa7134/saa7134-core.c pt->size = SAA7134_PGTABLE_SIZE; pt 230 drivers/media/pci/saa7134/saa7134-core.c pt->cpu = cpu; pt 231 drivers/media/pci/saa7134/saa7134-core.c pt->dma = dma_addr; pt 235 drivers/media/pci/saa7134/saa7134-core.c int 
saa7134_pgtable_build(struct pci_dev *pci, struct saa7134_pgtable *pt, pt 242 drivers/media/pci/saa7134/saa7134-core.c BUG_ON(NULL == pt || NULL == pt->cpu); pt 244 drivers/media/pci/saa7134/saa7134-core.c ptr = pt->cpu + startpage; pt 253 drivers/media/pci/saa7134/saa7134-core.c void saa7134_pgtable_free(struct pci_dev *pci, struct saa7134_pgtable *pt) pt 255 drivers/media/pci/saa7134/saa7134-core.c if (NULL == pt->cpu) pt 257 drivers/media/pci/saa7134/saa7134-core.c pci_free_consistent(pci, pt->size, pt->cpu, pt->dma); pt 258 drivers/media/pci/saa7134/saa7134-core.c pt->cpu = NULL; pt 101 drivers/media/pci/saa7134/saa7134-ts.c return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, pt 221 drivers/media/pci/saa7134/saa7134-ts.c saa7134_pgtable_alloc(dev->pci, &dev->ts_q.pt); pt 269 drivers/media/pci/saa7134/saa7134-ts.c (dev->ts_q.pt.dma >> 12)); pt 303 drivers/media/pci/saa7134/saa7134-ts.c saa7134_pgtable_free(dev->pci, &dev->ts_q.pt); pt 88 drivers/media/pci/saa7134/saa7134-vbi.c (dmaq->pt.dma >> 12); pt 124 drivers/media/pci/saa7134/saa7134-vbi.c return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, pt 847 drivers/media/pci/saa7134/saa7134-video.c (dmaq->pt.dma >> 12); pt 933 drivers/media/pci/saa7134/saa7134-video.c return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, pt 2131 drivers/media/pci/saa7134/saa7134-video.c saa7134_pgtable_alloc(dev->pci, &dev->video_q.pt); pt 2150 drivers/media/pci/saa7134/saa7134-video.c saa7134_pgtable_alloc(dev->pci, &dev->vbi_q.pt); pt 2159 drivers/media/pci/saa7134/saa7134-video.c saa7134_pgtable_free(dev->pci, &dev->video_q.pt); pt 2161 drivers/media/pci/saa7134/saa7134-video.c saa7134_pgtable_free(dev->pci, &dev->vbi_q.pt); pt 483 drivers/media/pci/saa7134/saa7134.h struct saa7134_pgtable pt; pt 508 drivers/media/pci/saa7134/saa7134.h struct saa7134_pgtable pt; pt 755 drivers/media/pci/saa7134/saa7134.h int saa7134_pgtable_alloc(struct pci_dev *pci, struct saa7134_pgtable *pt); pt 756 drivers/media/pci/saa7134/saa7134.h int saa7134_pgtable_build(struct pci_dev *pci, struct saa7134_pgtable *pt, pt 759 drivers/media/pci/saa7134/saa7134.h void saa7134_pgtable_free(struct pci_dev *pci, struct saa7134_pgtable *pt); pt 1252 drivers/media/pci/ttpci/av7110.c pci_dma_sync_sg_for_cpu(budget->dev->pci, budget->pt.slist, budget->pt.nents, PCI_DMA_FROMDEVICE); pt 2497 drivers/media/pci/ttpci/av7110.c &av7110->pt); pt 2516 drivers/media/pci/ttpci/av7110.c saa7146_write(dev, BASE_PAGE3, av7110->pt.dma | ME1 | 0x90); pt 2525 drivers/media/pci/ttpci/av7110.c &av7110->pt); pt 2539 drivers/media/pci/ttpci/av7110.c saa7146_write(dev, BASE_PAGE3, av7110->pt.dma | ME1 | 0x90); pt 2731 drivers/media/pci/ttpci/av7110.c saa7146_vfree_destroy_pgtable(pdev, av7110->grabbing, &av7110->pt); pt 2763 drivers/media/pci/ttpci/av7110.c saa7146_vfree_destroy_pgtable(saa->pci, av7110->grabbing, &av7110->pt); pt 186 drivers/media/pci/ttpci/av7110.h struct saa7146_pgtable pt; pt 132 drivers/media/pci/ttpci/budget-core.c saa7146_write(dev, BASE_PAGE3, budget->pt.dma | ME1 | 0x90); pt 183 drivers/media/pci/ttpci/budget-core.c pci_dma_sync_sg_for_cpu(budget->dev->pci, budget->pt.slist, budget->pt.nents, PCI_DMA_FROMDEVICE); pt 507 drivers/media/pci/ttpci/budget-core.c budget->grabbing = saa7146_vmalloc_build_pgtable(dev->pci, budget->buffer_size, &budget->pt); pt 527 drivers/media/pci/ttpci/budget-core.c saa7146_vfree_destroy_pgtable(dev->pci, budget->grabbing, &budget->pt); pt 556 drivers/media/pci/ttpci/budget-core.c 
saa7146_vfree_destroy_pgtable(dev->pci, budget->grabbing, &budget->pt); pt 50 drivers/media/pci/ttpci/budget.h struct saa7146_pgtable pt; pt 1381 drivers/misc/mic/scif/scif_api.c poll_table *pt; pt 1388 drivers/misc/mic/scif/scif_api.c pt = &table.pt; pt 1391 drivers/misc/mic/scif/scif_api.c pt->_key = ufds[i].events | EPOLLERR | EPOLLHUP; pt 1393 drivers/misc/mic/scif/scif_api.c pt, ufds[i].epd); pt 1397 drivers/misc/mic/scif/scif_api.c pt->_qproc = NULL; pt 1401 drivers/misc/mic/scif/scif_api.c pt->_qproc = NULL; pt 60 drivers/mtd/maps/impa7.c static struct { u_long addr; u_long size; } pt[NUM_FLASHBANKS] = { pt 69 drivers/mtd/maps/impa7.c pt[i].size, pt[i].addr); pt 71 drivers/mtd/maps/impa7.c impa7_map[i].phys = pt[i].addr; pt 72 drivers/mtd/maps/impa7.c impa7_map[i].virt = ioremap(pt[i].addr, pt[i].size); pt 25 drivers/mtd/ubi/debug.h #define ubi_dbg_print_hex_dump(l, ps, pt, r, g, b, len, a) \ pt 26 drivers/mtd/ubi/debug.h print_hex_dump(l, ps, pt, r, g, b, len, a) pt 793 drivers/net/ethernet/ibm/ehea/ehea_qmr.c static u64 ehea_reg_mr_section(int top, int dir, int idx, u64 *pt, pt 800 drivers/net/ethernet/ibm/ehea/ehea_qmr.c u64 pt_abs = __pa(pt); pt 808 drivers/net/ethernet/ibm/ehea/ehea_qmr.c pt[m] = __pa(pg); pt 824 drivers/net/ethernet/ibm/ehea/ehea_qmr.c static u64 ehea_reg_mr_sections(int top, int dir, u64 *pt, pt 835 drivers/net/ethernet/ibm/ehea/ehea_qmr.c hret = ehea_reg_mr_section(top, dir, idx, pt, adapter, mr); pt 842 drivers/net/ethernet/ibm/ehea/ehea_qmr.c static u64 ehea_reg_mr_dir_sections(int top, u64 *pt, pt 853 drivers/net/ethernet/ibm/ehea/ehea_qmr.c hret = ehea_reg_mr_sections(top, dir, pt, adapter, mr); pt 863 drivers/net/ethernet/ibm/ehea/ehea_qmr.c u64 *pt; pt 869 drivers/net/ethernet/ibm/ehea/ehea_qmr.c pt = (void *)get_zeroed_page(GFP_KERNEL); pt 870 drivers/net/ethernet/ibm/ehea/ehea_qmr.c if (!pt) { pt 897 drivers/net/ethernet/ibm/ehea/ehea_qmr.c hret = ehea_reg_mr_dir_sections(top, pt, adapter, mr); pt 913 drivers/net/ethernet/ibm/ehea/ehea_qmr.c free_page((unsigned long)pt); pt 1170 drivers/net/ethernet/intel/ice/ice_flex_pipe.c u16 pt; pt 1172 drivers/net/ethernet/intel/ice/ice_flex_pipe.c for (pt = 0; pt < hw->blk[blk].xlt1.count; pt++) { pt 1175 drivers/net/ethernet/intel/ice/ice_flex_pipe.c ptg = hw->blk[blk].xlt1.t[pt]; pt 1178 drivers/net/ethernet/intel/ice/ice_flex_pipe.c ice_ptg_add_mv_ptype(hw, blk, pt, ptg); pt 174 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c struct packet_type pt; pt 183 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c struct packet_type *pt, pt 186 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c struct mlx5e_lbt_priv *lbtp = pt->af_packet_priv; pt 244 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c lbtp->pt.type = htons(ETH_P_IP); pt 245 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c lbtp->pt.func = mlx5e_test_loopback_validate; pt 246 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c lbtp->pt.dev = priv->netdev; pt 247 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c lbtp->pt.af_packet_priv = lbtp; pt 248 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c dev_add_pack(&lbtp->pt); pt 265 drivers/net/ethernet/mellanox/mlx5/core/en_selftest.c dev_remove_pack(&lbtp->pt); pt 871 drivers/net/ethernet/mellanox/mlxsw/reg.h MLXSW_ITEM32(reg, spvm, pt, 0x00, 31, 1); pt 234 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct packet_type pt; pt 243 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct packet_type *pt, pt 246 
drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct stmmac_test_priv *tpriv = pt->af_packet_priv; pt 329 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.type = htons(ETH_P_IP); pt 330 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.func = stmmac_test_loopback_validate; pt 331 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.dev = priv->dev; pt 332 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.af_packet_priv = tpriv; pt 336 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_add_pack(&tpriv->pt); pt 360 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_remove_pack(&tpriv->pt); pt 712 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct packet_type *pt, pt 715 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct stmmac_test_priv *tpriv = pt->af_packet_priv; pt 749 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.type = htons(ETH_P_PAUSE); pt 750 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.func = stmmac_test_flowctrl_validate; pt 751 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.dev = priv->dev; pt 752 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.af_packet_priv = tpriv; pt 753 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_add_pack(&tpriv->pt); pt 811 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_remove_pack(&tpriv->pt); pt 833 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct packet_type *pt, pt 836 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct stmmac_test_priv *tpriv = pt->af_packet_priv; pt 907 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.type = htons(ETH_P_IP); pt 908 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.func = stmmac_test_vlan_validate; pt 909 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.dev = priv->dev; pt 910 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.af_packet_priv = tpriv; pt 919 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_add_pack(&tpriv->pt); pt 960 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_remove_pack(&tpriv->pt); pt 983 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.type = htons(ETH_P_8021Q); pt 984 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.func = stmmac_test_vlan_validate; pt 985 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.dev = priv->dev; pt 986 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.af_packet_priv = tpriv; pt 995 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_add_pack(&tpriv->pt); pt 1036 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_remove_pack(&tpriv->pt); pt 1240 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.type = svlan ? 
htons(ETH_P_8021Q) : htons(ETH_P_IP); pt 1241 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.func = stmmac_test_vlan_validate; pt 1242 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.dev = priv->dev; pt 1243 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.af_packet_priv = tpriv; pt 1246 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_add_pack(&tpriv->pt); pt 1274 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_remove_pack(&tpriv->pt); pt 1555 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct packet_type *pt, pt 1558 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c struct stmmac_test_priv *tpriv = pt->af_packet_priv; pt 1598 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.type = htons(ETH_P_ARP); pt 1599 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.func = stmmac_test_arp_validate; pt 1600 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.dev = priv->dev; pt 1601 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c tpriv->pt.af_packet_priv = tpriv; pt 1603 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_add_pack(&tpriv->pt); pt 1636 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c dev_remove_pack(&tpriv->pt); pt 314 drivers/net/fddi/skfp/pmf.c const struct s_p_tab *pt ; pt 427 drivers/net/fddi/skfp/pmf.c pt = smt_get_ptab(pa->p_type) ; pt 428 drivers/net/fddi/skfp/pmf.c if (pt && pt->p_access == AC_GROUP && !set) { pt 429 drivers/net/fddi/skfp/pmf.c pt++ ; pt 430 drivers/net/fddi/skfp/pmf.c while (pt->p_access == AC_G || pt 431 drivers/net/fddi/skfp/pmf.c pt->p_access == AC_GR) { pt 432 drivers/net/fddi/skfp/pmf.c smt_add_para(smc,&pcon,pt->p_num, pt 434 drivers/net/fddi/skfp/pmf.c pt++ ; pt 459 drivers/net/fddi/skfp/pmf.c if (pt && pt->p_access == AC_S) { pt 550 drivers/net/fddi/skfp/pmf.c const struct s_p_tab *pt ; pt 575 drivers/net/fddi/skfp/pmf.c pt = smt_get_ptab(para) ; pt 576 drivers/net/fddi/skfp/pmf.c if (pt && pt->p_access == AC_S) pt 872 drivers/net/fddi/skfp/pmf.c if (!pt) { pt 880 drivers/net/fddi/skfp/pmf.c switch (pt->p_access) { pt 888 drivers/net/fddi/skfp/pmf.c from = mib_addr + pt->p_offset ; pt 890 drivers/net/fddi/skfp/pmf.c swap = pt->p_swap ; /* pointer to swap string */ pt 1076 drivers/net/fddi/skfp/pmf.c const struct s_p_tab *pt ; pt 1154 drivers/net/fddi/skfp/pmf.c pt = smt_get_ptab(pa->p_type) ; pt 1155 drivers/net/fddi/skfp/pmf.c if (!pt) pt 1158 drivers/net/fddi/skfp/pmf.c switch (pt->p_access) { pt 1165 drivers/net/fddi/skfp/pmf.c to = mib_addr + pt->p_offset ; pt 1166 drivers/net/fddi/skfp/pmf.c swap = pt->p_swap ; /* pointer to swap string */ pt 1521 drivers/net/fddi/skfp/pmf.c const struct s_p_tab *pt ; pt 1522 drivers/net/fddi/skfp/pmf.c for (pt = p_tab ; pt->p_num && pt->p_num != para ; pt++) pt 1524 drivers/net/fddi/skfp/pmf.c return pt->p_num ? 
pt : NULL; pt 416 drivers/net/ppp/pppoe.c struct packet_type *pt, struct net_device *orig_dev) pt 484 drivers/net/ppp/pppoe.c struct packet_type *pt, struct net_device *orig_dev) pt 81 drivers/phy/socionext/phy-uniphier-usb3hs.c struct uniphier_u3hsphy_trim_param *pt); pt 86 drivers/phy/socionext/phy-uniphier-usb3hs.c struct uniphier_u3hsphy_trim_param *pt) pt 89 drivers/phy/socionext/phy-uniphier-usb3hs.c *pconfig |= FIELD_PREP(HSPHY_CFG0_RTERM_MASK, pt->rterm); pt 92 drivers/phy/socionext/phy-uniphier-usb3hs.c *pconfig |= FIELD_PREP(HSPHY_CFG0_SEL_T_MASK, pt->sel_t); pt 95 drivers/phy/socionext/phy-uniphier-usb3hs.c *pconfig |= FIELD_PREP(HSPHY_CFG0_HS_I_MASK, pt->hs_i); pt 120 drivers/phy/socionext/phy-uniphier-usb3hs.c struct uniphier_u3hsphy_trim_param *pt) pt 124 drivers/phy/socionext/phy-uniphier-usb3hs.c ret = uniphier_u3hsphy_get_nvparam(priv, "rterm", &pt->rterm); pt 128 drivers/phy/socionext/phy-uniphier-usb3hs.c ret = uniphier_u3hsphy_get_nvparam(priv, "sel_t", &pt->sel_t); pt 132 drivers/phy/socionext/phy-uniphier-usb3hs.c ret = uniphier_u3hsphy_get_nvparam(priv, "hs_i", &pt->hs_i); pt 1041 drivers/scsi/ips.c ips_passthru_t *pt; pt 1091 drivers/scsi/ips.c pt = (ips_passthru_t *) scsi_sglist(SC); pt 1092 drivers/scsi/ips.c if ((pt->CoppCP.cmd.reset.op_code == IPS_CMD_RESET_CHANNEL) && pt 1093 drivers/scsi/ips.c (pt->CoppCP.cmd.reset.adapter_flag == 1)) { pt 1559 drivers/scsi/ips.c ips_passthru_t *pt; pt 1579 drivers/scsi/ips.c pt = (ips_passthru_t *) ha->ioctl_data; pt 1580 drivers/scsi/ips.c ips_scmd_buf_read(SC, pt, sizeof (ips_passthru_t)); pt 1581 drivers/scsi/ips.c pt->BasicStatus = 0x0B; pt 1582 drivers/scsi/ips.c pt->ExtendedStatus = 0x00; pt 1583 drivers/scsi/ips.c ips_scmd_buf_write(SC, pt, sizeof (ips_passthru_t)); pt 1590 drivers/scsi/ips.c pt = (ips_passthru_t *) ha->ioctl_data; pt 1602 drivers/scsi/ips.c switch (pt->CoppCmd) { pt 1615 drivers/scsi/ips.c if (length < (sizeof (ips_passthru_t) + pt->CmdBSize)) { pt 1625 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.op_code == pt 1627 drivers/scsi/ips.c ret = ips_flash_copperhead(ha, pt, scb); pt 1632 drivers/scsi/ips.c if (ips_usrcmd(ha, pt, scb)) pt 1651 drivers/scsi/ips.c ips_flash_copperhead(ips_ha_t * ha, ips_passthru_t * pt, ips_scb_t * scb) pt 1657 drivers/scsi/ips.c if (IPS_IS_TROMBONE(ha) && pt->CoppCP.cmd.flashfw.type == IPS_FW_IMAGE) { pt 1658 drivers/scsi/ips.c if (ips_usrcmd(ha, pt, scb)) pt 1663 drivers/scsi/ips.c pt->BasicStatus = 0x0B; pt 1664 drivers/scsi/ips.c pt->ExtendedStatus = 0; pt 1668 drivers/scsi/ips.c if (pt->CoppCP.cmd.flashfw.type == IPS_BIOS_IMAGE && pt 1669 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.direction == IPS_ERASE_BIOS) { pt 1670 drivers/scsi/ips.c pt->BasicStatus = 0; pt 1671 drivers/scsi/ips.c return ips_flash_bios(ha, pt, scb); pt 1672 drivers/scsi/ips.c } else if (pt->CoppCP.cmd.flashfw.packet_num == 0) { pt 1679 drivers/scsi/ips.c datasize = pt->CoppCP.cmd.flashfw.total_packets * pt 1680 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.count; pt 1692 drivers/scsi/ips.c if (pt->CoppCP.cmd.flashfw.count + ha->flash_datasize > pt 1702 drivers/scsi/ips.c pt->BasicStatus = 0; pt 1703 drivers/scsi/ips.c memcpy(&ha->flash_data[ha->flash_datasize], pt + 1, pt 1704 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.count); pt 1705 drivers/scsi/ips.c ha->flash_datasize += pt->CoppCP.cmd.flashfw.count; pt 1706 drivers/scsi/ips.c if (pt->CoppCP.cmd.flashfw.packet_num == pt 1707 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.total_packets - 1) { pt 1708 drivers/scsi/ips.c if (pt->CoppCP.cmd.flashfw.type == 
IPS_BIOS_IMAGE) pt 1709 drivers/scsi/ips.c return ips_flash_bios(ha, pt, scb); pt 1710 drivers/scsi/ips.c else if (pt->CoppCP.cmd.flashfw.type == IPS_FW_IMAGE) pt 1711 drivers/scsi/ips.c return ips_flash_firmware(ha, pt, scb); pt 1722 drivers/scsi/ips.c ips_flash_bios(ips_ha_t * ha, ips_passthru_t * pt, ips_scb_t * scb) pt 1725 drivers/scsi/ips.c if (pt->CoppCP.cmd.flashfw.type == IPS_BIOS_IMAGE && pt 1726 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.direction == IPS_WRITE_BIOS) { pt 1758 drivers/scsi/ips.c } else if (pt->CoppCP.cmd.flashfw.type == IPS_BIOS_IMAGE && pt 1759 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.direction == IPS_ERASE_BIOS) { pt 1771 drivers/scsi/ips.c pt->BasicStatus = 0x0B; pt 1772 drivers/scsi/ips.c pt->ExtendedStatus = 0x00; pt 1824 drivers/scsi/ips.c ips_flash_firmware(ips_ha_t * ha, ips_passthru_t * pt, ips_scb_t * scb) pt 1829 drivers/scsi/ips.c if (pt->CoppCP.cmd.flashfw.type == IPS_FW_IMAGE && pt 1830 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.direction == IPS_WRITE_FW) { pt 1831 drivers/scsi/ips.c memset(&pt->CoppCP.cmd, 0, sizeof (IPS_HOST_COMMAND)); pt 1832 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.op_code = IPS_CMD_DOWNLOAD; pt 1833 drivers/scsi/ips.c pt->CoppCP.cmd.flashfw.count = cpu_to_le32(ha->flash_datasize); pt 1835 drivers/scsi/ips.c pt->BasicStatus = 0x0B; pt 1836 drivers/scsi/ips.c pt->ExtendedStatus = 0x00; pt 1844 drivers/scsi/ips.c memcpy(&scb->cmd, &pt->CoppCP.cmd, sizeof (IPS_IOCTL_CMD)); pt 1865 drivers/scsi/ips.c if (pt->TimeOut) pt 1866 drivers/scsi/ips.c scb->timeout = pt->TimeOut; pt 1897 drivers/scsi/ips.c ips_usrcmd(ips_ha_t * ha, ips_passthru_t * pt, ips_scb_t * scb) pt 1904 drivers/scsi/ips.c if ((!scb) || (!pt) || (!ha)) pt 1911 drivers/scsi/ips.c memcpy(&scb->cmd, &pt->CoppCP.cmd, sizeof (IPS_IOCTL_CMD)); pt 1912 drivers/scsi/ips.c memcpy(&scb->dcdb, &pt->CoppCP.dcdb, sizeof (IPS_DCDB_TABLE)); pt 1934 drivers/scsi/ips.c if (pt->CmdBSize) { pt 1935 drivers/scsi/ips.c scb->data_len = pt->CmdBSize; pt 1947 drivers/scsi/ips.c if (pt->CmdBSize) { pt 1957 drivers/scsi/ips.c if (pt->TimeOut) { pt 1958 drivers/scsi/ips.c scb->timeout = pt->TimeOut; pt 1960 drivers/scsi/ips.c if (pt->TimeOut <= 10) pt 1962 drivers/scsi/ips.c else if (pt->TimeOut <= 60) pt 1987 drivers/scsi/ips.c ips_passthru_t *pt; pt 1997 drivers/scsi/ips.c pt = (ips_passthru_t *) ha->ioctl_data; pt 2001 drivers/scsi/ips.c memcpy(&pt->CoppCP.dcdb, &scb->dcdb, sizeof (IPS_DCDB_TABLE)); pt 2003 drivers/scsi/ips.c pt->BasicStatus = scb->basic_status; pt 2004 drivers/scsi/ips.c pt->ExtendedStatus = scb->extended_status; pt 2005 drivers/scsi/ips.c pt->AdapterType = ha->ad_type; pt 101 drivers/soc/aspeed/aspeed-lpc-snoop.c struct poll_table_struct *pt) pt 105 drivers/soc/aspeed/aspeed-lpc-snoop.c poll_wait(file, &chan->wq, pt); pt 51 drivers/staging/comedi/drivers/addi_apci_1500.c unsigned int pt[2]; /* Pattern Transition */ pt 300 drivers/staging/comedi/drivers/addi_apci_1500.c z8536_write(dev, devpriv->pt[pa_trig] & 0xff, Z8536_PA_PT_REG); pt 305 drivers/staging/comedi/drivers/addi_apci_1500.c z8536_write(dev, (devpriv->pt[pb_trig] >> 8) & 0xff, Z8536_PB_PT_REG); pt 460 drivers/staging/comedi/drivers/addi_apci_1500.c unsigned int pt = devpriv->pt[trig] & old_mask; pt 478 drivers/staging/comedi/drivers/addi_apci_1500.c pt = 0; pt 483 drivers/staging/comedi/drivers/addi_apci_1500.c pt |= chan_mask; /* enable edge detection */ pt 489 drivers/staging/comedi/drivers/addi_apci_1500.c pt &= ~chan_mask; /* enable level detection */ pt 505 drivers/staging/comedi/drivers/addi_apci_1500.c src 
= pt & 0xff; pt 509 drivers/staging/comedi/drivers/addi_apci_1500.c src = (pt >> 8) & 0xff; pt 522 drivers/staging/comedi/drivers/addi_apci_1500.c devpriv->pt[trig] = pt; pt 123 drivers/staging/media/ipu3/ipu3-mmu.c u32 *pt; pt 126 drivers/staging/media/ipu3/ipu3-mmu.c pt = (u32 *)__get_free_page(GFP_KERNEL); pt 127 drivers/staging/media/ipu3/ipu3-mmu.c if (!pt) pt 131 drivers/staging/media/ipu3/ipu3-mmu.c pt[pte] = pteval; pt 133 drivers/staging/media/ipu3/ipu3-mmu.c set_memory_uc((unsigned long int)pt, IPU3_PT_ORDER); pt 135 drivers/staging/media/ipu3/ipu3-mmu.c return pt; pt 142 drivers/staging/media/ipu3/ipu3-mmu.c static void imgu_mmu_free_page_table(u32 *pt) pt 144 drivers/staging/media/ipu3/ipu3-mmu.c set_memory_wb((unsigned long int)pt, IPU3_PT_ORDER); pt 145 drivers/staging/media/ipu3/ipu3-mmu.c free_page((unsigned long)pt); pt 805 drivers/staging/media/soc_camera/soc_camera.c static __poll_t soc_camera_poll(struct file *file, poll_table *pt) pt 815 drivers/staging/media/soc_camera/soc_camera.c res = ici->ops->poll(file, pt); pt 2213 drivers/staging/rtl8723bs/core/rtw_security.c static void rijndaelEncrypt(u32 rk[/*44*/], u8 pt[16], u8 ct[16]) pt 2223 drivers/staging/rtl8723bs/core/rtw_security.c s0 = GETU32(pt) ^ rk[0]; pt 2224 drivers/staging/rtl8723bs/core/rtw_security.c s1 = GETU32(pt + 4) ^ rk[1]; pt 2225 drivers/staging/rtl8723bs/core/rtw_security.c s2 = GETU32(pt + 8) ^ rk[2]; pt 2226 drivers/staging/rtl8723bs/core/rtw_security.c s3 = GETU32(pt + 12) ^ rk[3]; pt 332 drivers/staging/rtl8723bs/include/rtw_security.h #define GETU32(pt) (((u32)(pt)[0] << 24) ^ ((u32)(pt)[1] << 16) ^ \ pt 333 drivers/staging/rtl8723bs/include/rtw_security.h ((u32)(pt)[2] << 8) ^ ((u32)(pt)[3])) pt 598 drivers/target/target_core_pscsi.c struct pscsi_plugin_task *pt = cmd->priv; pt 604 drivers/target/target_core_pscsi.c if (!pt) pt 607 drivers/target/target_core_pscsi.c cdb = &pt->pscsi_cdb[0]; pt 960 drivers/target/target_core_pscsi.c struct pscsi_plugin_task *pt; pt 968 drivers/target/target_core_pscsi.c pt = kzalloc(sizeof(*pt) + scsi_command_size(cmd->t_task_cdb), GFP_KERNEL); pt 969 drivers/target/target_core_pscsi.c if (!pt) { pt 972 drivers/target/target_core_pscsi.c cmd->priv = pt; pt 974 drivers/target/target_core_pscsi.c memcpy(pt->pscsi_cdb, cmd->t_task_cdb, pt 994 drivers/target/target_core_pscsi.c scsi_req(req)->cmd_len = scsi_command_size(pt->pscsi_cdb); pt 995 drivers/target/target_core_pscsi.c scsi_req(req)->cmd = &pt->pscsi_cdb[0]; pt 1012 drivers/target/target_core_pscsi.c kfree(pt); pt 1041 drivers/target/target_core_pscsi.c struct pscsi_plugin_task *pt = cmd->priv; pt 1047 drivers/target/target_core_pscsi.c " 0x%02x Result: 0x%08x\n", cmd, pt->pscsi_cdb[0], pt 1060 drivers/target/target_core_pscsi.c " 0x%02x Result: 0x%08x\n", cmd, pt->pscsi_cdb[0], pt 1067 drivers/target/target_core_pscsi.c kfree(pt); pt 266 drivers/tty/serial/8250/8250.h static inline int is_omap1_8250(struct uart_8250_port *pt) pt 270 drivers/tty/serial/8250/8250.h switch (pt->port.mapbase) { pt 284 drivers/tty/serial/8250/8250.h static inline int is_omap1510_8250(struct uart_8250_port *pt) pt 289 drivers/tty/serial/8250/8250.h return is_omap1_8250(pt); pt 292 drivers/tty/serial/8250/8250.h static inline int is_omap1_8250(struct uart_8250_port *pt) pt 296 drivers/tty/serial/8250/8250.h static inline int is_omap1510_8250(struct uart_8250_port *pt) pt 2747 drivers/tty/serial/8250/8250_port.c static unsigned int serial8250_port_size(struct uart_8250_port *pt) pt 2749 drivers/tty/serial/8250/8250_port.c if 
(pt->port.mapsize) pt 2750 drivers/tty/serial/8250/8250_port.c return pt->port.mapsize; pt 2751 drivers/tty/serial/8250/8250_port.c if (pt->port.iotype == UPIO_AU) { pt 2752 drivers/tty/serial/8250/8250_port.c if (pt->port.type == PORT_RT2880) pt 2756 drivers/tty/serial/8250/8250_port.c if (is_omap1_8250(pt)) pt 2757 drivers/tty/serial/8250/8250_port.c return 0x16 << pt->port.regshift; pt 2759 drivers/tty/serial/8250/8250_port.c return 8 << pt->port.regshift; pt 79 drivers/vfio/virqfd.c wait_queue_head_t *wqh, poll_table *pt) pt 81 drivers/vfio/virqfd.c struct virqfd *virqfd = container_of(pt, struct virqfd, pt); pt 164 drivers/vfio/virqfd.c init_poll_funcptr(&virqfd->pt, virqfd_ptable_queue_proc); pt 166 drivers/vfio/virqfd.c events = vfs_poll(irqfd.file, &virqfd->pt); pt 159 drivers/vhost/vhost.c poll_table *pt) pt 163 drivers/vhost/vhost.c poll = container_of(pt, struct vhost_poll, table); pt 1633 fs/aio.c struct poll_table_struct pt = { ._key = req->events }; pt 1638 fs/aio.c mask = vfs_poll(req->file, &pt) & req->events; pt 1719 fs/aio.c struct poll_table_struct pt; pt 1728 fs/aio.c struct aio_poll_table *pt = container_of(p, struct aio_poll_table, pt); pt 1731 fs/aio.c if (unlikely(pt->iocb->poll.head)) { pt 1732 fs/aio.c pt->error = -EINVAL; pt 1736 fs/aio.c pt->error = 0; pt 1737 fs/aio.c pt->iocb->poll.head = head; pt 1738 fs/aio.c add_wait_queue(head, &pt->iocb->poll.wait); pt 1763 fs/aio.c apt.pt._qproc = aio_poll_queue_proc; pt 1764 fs/aio.c apt.pt._key = req->events; pt 1772 fs/aio.c mask = vfs_poll(req->file, &apt.pt) & req->events; pt 1126 fs/btrfs/send.c struct fs_path *pt = ctx; pt 1128 fs/btrfs/send.c ret = fs_path_copy(pt, p); pt 29 fs/ecryptfs/miscdev.c ecryptfs_miscdev_poll(struct file *file, poll_table *pt) pt 46 fs/ecryptfs/miscdev.c poll_wait(file, &daemon->wait, pt); pt 250 fs/eventpoll.c poll_table pt; pt 354 fs/eventpoll.c return container_of(p, struct ep_pqueue, pt)->epi; pt 877 fs/eventpoll.c poll_table *pt); pt 884 fs/eventpoll.c static __poll_t ep_item_poll(const struct epitem *epi, poll_table *pt, pt 890 fs/eventpoll.c pt->_key = epi->event.events; pt 892 fs/eventpoll.c return vfs_poll(epi->ffd.file, pt) & epi->event.events; pt 895 fs/eventpoll.c poll_wait(epi->ffd.file, &ep->poll_wait, pt); pt 896 fs/eventpoll.c locked = pt && (pt->_qproc == ep_ptable_queue_proc); pt 907 fs/eventpoll.c poll_table pt; pt 910 fs/eventpoll.c init_poll_funcptr(&pt, NULL); pt 914 fs/eventpoll.c if (ep_item_poll(epi, &pt, depth)) { pt 1317 fs/eventpoll.c poll_table *pt) pt 1319 fs/eventpoll.c struct epitem *epi = ep_item_from_epqueue(pt); pt 1532 fs/eventpoll.c init_poll_funcptr(&epq.pt, ep_ptable_queue_proc); pt 1541 fs/eventpoll.c revents = ep_item_poll(epi, &epq.pt, 1); pt 1633 fs/eventpoll.c poll_table pt; pt 1637 fs/eventpoll.c init_poll_funcptr(&pt, NULL); pt 1679 fs/eventpoll.c if (ep_item_poll(epi, &pt, 1)) { pt 1709 fs/eventpoll.c poll_table pt; pt 1711 fs/eventpoll.c init_poll_funcptr(&pt, NULL); pt 1749 fs/eventpoll.c revents = ep_item_poll(epi, &pt, 1); pt 1793 fs/io_uring.c struct poll_table_struct pt = { ._key = poll->events }; pt 1801 fs/io_uring.c mask = vfs_poll(poll->file, &pt) & poll->events; pt 1857 fs/io_uring.c struct poll_table_struct pt; pt 1865 fs/io_uring.c struct io_poll_table *pt = container_of(p, struct io_poll_table, pt); pt 1867 fs/io_uring.c if (unlikely(pt->req->poll.head)) { pt 1868 fs/io_uring.c pt->error = -EINVAL; pt 1872 fs/io_uring.c pt->error = 0; pt 1873 fs/io_uring.c pt->req->poll.head = head; pt 1874 fs/io_uring.c add_wait_queue(head, 
&pt->req->poll.wait); pt 1902 fs/io_uring.c ipt.pt._qproc = io_poll_queue_proc; pt 1903 fs/io_uring.c ipt.pt._key = poll->events; pt 1913 fs/io_uring.c mask = vfs_poll(poll->file, &ipt.pt) & poll->events; pt 332 fs/ntfs/compress.c u16 lg, pt, length, max_non_overlap; pt 372 fs/ntfs/compress.c pt = le16_to_cpup((le16*)cb); pt 379 fs/ntfs/compress.c dp_back_addr = dp_addr - (pt >> (12 - lg)) - 1; pt 384 fs/ntfs/compress.c length = (pt & (0xfff >> lg)) + 3; pt 123 fs/select.c init_poll_funcptr(&pwq->pt, __pollwait); pt 224 fs/select.c struct poll_wqueues *pwq = container_of(p, struct poll_wqueues, pt); pt 495 fs/select.c wait = &table.pt; pt 874 fs/select.c poll_table* pt = &wait->pt; pt 883 fs/select.c pt->_qproc = NULL; pt 907 fs/select.c if (do_pollfd(pfd, pt, &can_busy_loop, pt 910 fs/select.c pt->_qproc = NULL; pt 921 fs/select.c pt->_qproc = NULL; pt 618 include/linux/cgroup-defs.h struct poll_table_struct *pt); pt 267 include/linux/kernfs.h struct poll_table_struct *pt); pt 371 include/linux/kernfs.h struct poll_table_struct *pt); pt 58 include/linux/kvm_irqfd.h poll_table pt; pt 2613 include/linux/netdevice.h void dev_add_pack(struct packet_type *pt); pt 2614 include/linux/netdevice.h void dev_remove_pack(struct packet_type *pt); pt 2615 include/linux/netdevice.h void __dev_remove_pack(struct packet_type *pt); pt 75 include/linux/poll.h static inline void init_poll_funcptr(poll_table *pt, poll_queue_proc qproc) pt 77 include/linux/poll.h pt->_qproc = qproc; pt 78 include/linux/poll.h pt->_key = ~(__poll_t)0; /* all events enabled */ pt 86 include/linux/poll.h static inline __poll_t vfs_poll(struct file *file, struct poll_table_struct *pt) pt 90 include/linux/poll.h return file->f_op->poll(file, pt); pt 104 include/linux/poll.h poll_table pt; pt 187 include/linux/vfio.h poll_table pt; pt 171 include/media/drv-intf/saa7146.h int saa7146_pgtable_alloc(struct pci_dev *pci, struct saa7146_pgtable *pt); pt 172 include/media/drv-intf/saa7146.h void saa7146_pgtable_free(struct pci_dev *pci, struct saa7146_pgtable *pt); pt 173 include/media/drv-intf/saa7146.h int saa7146_pgtable_build_single(struct pci_dev *pci, struct saa7146_pgtable *pt, struct scatterlist *list, int length ); pt 174 include/media/drv-intf/saa7146.h void *saa7146_vmalloc_build_pgtable(struct pci_dev *pci, long length, struct saa7146_pgtable *pt); pt 175 include/media/drv-intf/saa7146.h void saa7146_vfree_destroy_pgtable(struct pci_dev *pci, void *mem, struct saa7146_pgtable *pt); pt 69 include/media/drv-intf/saa7146_vv.h struct saa7146_pgtable pt[3]; pt 119 include/net/dn_dev.h __u8 pt; pt 16 include/net/dn_route.h struct packet_type *pt, struct net_device *orig_dev); pt 70 include/net/dsa.h struct packet_type *pt); pt 173 include/net/dsa.h struct packet_type *pt); pt 154 include/net/ip.h int ip_rcv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, pt 156 include/net/ip.h void ip_list_rcv(struct list_head *head, struct packet_type *pt, pt 974 include/net/ipv6.h struct packet_type *pt, struct net_device *orig_dev); pt 975 include/net/ipv6.h void ipv6_list_rcv(struct list_head *head, struct packet_type *pt, pt 61 include/net/llc.h struct packet_type *pt, pt 98 include/net/llc.h int llc_rcv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, pt 113 include/net/llc.h struct packet_type *pt, pt 8 include/net/p8022.h struct packet_type *pt, pt 20 include/sound/pt2258.h extern int snd_pt2258_reset(struct snd_pt2258 *pt); pt 21 include/sound/pt2258.h extern int snd_pt2258_build_controls(struct 
snd_pt2258 *pt); pt 3673 kernel/cgroup/cgroup.c poll_table *pt) pt 3675 kernel/cgroup/cgroup.c return psi_trigger_poll(&of->priv, of->file, pt); pt 3785 kernel/cgroup/cgroup.c static __poll_t cgroup_file_poll(struct kernfs_open_file *of, poll_table *pt) pt 3790 kernel/cgroup/cgroup.c return cft->poll(of, pt); pt 3792 kernel/cgroup/cgroup.c return kernfs_generic_poll(of, pt); pt 604 lib/crypto/des.c #define T1(x) pt[2 * (x) + 0] pt 605 lib/crypto/des.c #define T2(x) pt[2 * (x) + 1] pt 606 lib/crypto/des.c #define T3(x) pt[2 * (x) + 2] pt 607 lib/crypto/des.c #define T4(x) pt[2 * (x) + 3] pt 627 lib/crypto/des.c const u32 *pt = pc2; pt 655 lib/crypto/des.c pt += 512; pt 718 lib/crypto/des.c const u32 *pt = pc2; pt 743 lib/crypto/des.c pt += 512; pt 99 lib/nlattr.c static int nla_validate_int_range(const struct nla_policy *pt, pt 106 lib/nlattr.c validate_min = pt->validation_type == NLA_VALIDATE_RANGE || pt 107 lib/nlattr.c pt->validation_type == NLA_VALIDATE_MIN; pt 108 lib/nlattr.c validate_max = pt->validation_type == NLA_VALIDATE_RANGE || pt 109 lib/nlattr.c pt->validation_type == NLA_VALIDATE_MAX; pt 111 lib/nlattr.c switch (pt->type) { pt 135 lib/nlattr.c if ((validate_min && nla_get_u64(nla) < pt->min) || pt 136 lib/nlattr.c (validate_max && nla_get_u64(nla) > pt->max)) { pt 147 lib/nlattr.c if ((validate_min && value < pt->min) || pt 148 lib/nlattr.c (validate_max && value > pt->max)) { pt 162 lib/nlattr.c const struct nla_policy *pt; pt 172 lib/nlattr.c pt = &policy[type]; pt 174 lib/nlattr.c BUG_ON(pt->type > NLA_TYPE_MAX); pt 176 lib/nlattr.c if ((nla_attr_len[pt->type] && attrlen != nla_attr_len[pt->type]) || pt 177 lib/nlattr.c (pt->type == NLA_EXACT_LEN_WARN && attrlen != pt->len)) { pt 188 lib/nlattr.c if ((pt->type == NLA_NESTED || pt->type == NLA_NESTED_ARRAY) && pt 194 lib/nlattr.c if (pt->type != NLA_NESTED && pt->type != NLA_NESTED_ARRAY && pt 195 lib/nlattr.c pt->type != NLA_UNSPEC && (nla->nla_type & NLA_F_NESTED)) { pt 202 lib/nlattr.c switch (pt->type) { pt 204 lib/nlattr.c if (attrlen != pt->len) pt 209 lib/nlattr.c if (extack && pt->validation_data) { pt 211 lib/nlattr.c extack->_msg = pt->validation_data; pt 226 lib/nlattr.c err = validate_nla_bitfield32(nla, pt->validation_data); pt 232 lib/nlattr.c if (pt->len) pt 233 lib/nlattr.c minlen = min_t(int, attrlen, pt->len + 1); pt 247 lib/nlattr.c if (pt->len) { pt 253 lib/nlattr.c if (attrlen > pt->len) pt 259 lib/nlattr.c if (pt->len && attrlen > pt->len) pt 271 lib/nlattr.c if (pt->validation_data) { pt 272 lib/nlattr.c err = __nla_validate(nla_data(nla), nla_len(nla), pt->len, pt 273 lib/nlattr.c pt->validation_data, validate, pt 292 lib/nlattr.c if (pt->validation_data) { pt 296 lib/nlattr.c pt->len, pt->validation_data, pt 316 lib/nlattr.c if (attrlen < pt->len) pt 321 lib/nlattr.c if (pt->len) pt 322 lib/nlattr.c minlen = pt->len; pt 324 lib/nlattr.c minlen = nla_attr_minlen[pt->type]; pt 331 lib/nlattr.c switch (pt->validation_type) { pt 338 lib/nlattr.c err = nla_validate_int_range(pt, nla, extack); pt 343 lib/nlattr.c if (pt->validate) { pt 344 lib/nlattr.c err = pt->validate(nla, extack); pt 170 mm/memcontrol.c poll_table pt; pt 4678 mm/memcontrol.c wait_queue_head_t *wqh, poll_table *pt) pt 4681 mm/memcontrol.c container_of(pt, struct mem_cgroup_event, pt); pt 4727 mm/memcontrol.c init_poll_funcptr(&event->pt, memcg_event_ptable_queue_proc); pt 4801 mm/memcontrol.c vfs_poll(efile.file, &event->pt); pt 767 net/802/mrp.c struct packet_type *pt, struct net_device *orig_dev) pt 769 net/802/mrp.c struct 
mrp_application *appl = container_of(pt, struct mrp_application, pt 35 net/802/p8022.c struct packet_type *pt, pt 46 net/802/psnap.c struct packet_type *pt, struct net_device *orig_dev) pt 31 net/802/stp.c struct packet_type *pt, struct net_device *orig_dev) pt 128 net/9p/trans_fd.c poll_table pt; pt 219 net/9p/trans_fd.c p9_fd_poll(struct p9_client *client, struct poll_table_struct *pt, int *err) pt 233 net/9p/trans_fd.c ret = vfs_poll(ts->rd, pt); pt 235 net/9p/trans_fd.c ret = (ret & ~EPOLLOUT) | (vfs_poll(ts->wr, pt) & ~EPOLLIN); pt 546 net/9p/trans_fd.c struct p9_conn *m = container_of(p, struct p9_conn, pt); pt 591 net/9p/trans_fd.c init_poll_funcptr(&m->pt, p9_pollwait); pt 593 net/9p/trans_fd.c n = p9_fd_poll(client, &m->pt, NULL); pt 714 net/appletalk/aarp.c struct packet_type *pt, struct net_device *orig_dev) pt 1413 net/appletalk/ddp.c struct packet_type *pt, struct net_device *orig_dev) pt 1508 net/appletalk/ddp.c struct packet_type *pt, struct net_device *orig_dev) pt 1554 net/appletalk/ddp.c return atalk_rcv(skb, dev, pt, orig_dev); pt 171 net/ax25/ax25_uid.c struct ax25_uid_assoc *pt; pt 173 net/ax25/ax25_uid.c pt = hlist_entry(v, struct ax25_uid_assoc, uid_node); pt 175 net/ax25/ax25_uid.c from_kuid_munged(seq_user_ns(seq), pt->uid), pt 176 net/ax25/ax25_uid.c ax2asc(buf, &pt->call)); pt 211 net/bridge/br_ioctl.c struct net_bridge_port *pt; pt 214 net/bridge/br_ioctl.c if ((pt = br_get_port(br, args[2])) == NULL) { pt 220 net/bridge/br_ioctl.c memcpy(&p.designated_root, &pt->designated_root, 8); pt 221 net/bridge/br_ioctl.c memcpy(&p.designated_bridge, &pt->designated_bridge, 8); pt 222 net/bridge/br_ioctl.c p.port_id = pt->port_id; pt 223 net/bridge/br_ioctl.c p.designated_port = pt->designated_port; pt 224 net/bridge/br_ioctl.c p.path_cost = pt->path_cost; pt 225 net/bridge/br_ioctl.c p.designated_cost = pt->designated_cost; pt 226 net/bridge/br_ioctl.c p.state = pt->state; pt 227 net/bridge/br_ioctl.c p.top_change_ack = pt->topology_change_ack; pt 228 net/bridge/br_ioctl.c p.config_pending = pt->config_pending; pt 229 net/bridge/br_ioctl.c p.message_age_timer_value = br_timer_value(&pt->message_age_timer); pt 230 net/bridge/br_ioctl.c p.forward_delay_timer_value = br_timer_value(&pt->forward_delay_timer); pt 231 net/bridge/br_ioctl.c p.hold_timer_value = br_timer_value(&pt->hold_timer); pt 674 net/can/af_can.c struct packet_type *pt, struct net_device *orig_dev) pt 691 net/can/af_can.c struct packet_type *pt, struct net_device *orig_dev) pt 302 net/core/dev.c static inline struct list_head *ptype_head(const struct packet_type *pt) pt 304 net/core/dev.c if (pt->type == htons(ETH_P_ALL)) pt 305 net/core/dev.c return pt->dev ? &pt->dev->ptype_all : &ptype_all; pt 307 net/core/dev.c return pt->dev ? 
&pt->dev->ptype_specific : pt 308 net/core/dev.c &ptype_base[ntohs(pt->type) & PTYPE_HASH_MASK]; pt 324 net/core/dev.c void dev_add_pack(struct packet_type *pt) pt 326 net/core/dev.c struct list_head *head = ptype_head(pt); pt 329 net/core/dev.c list_add_rcu(&pt->list, head); pt 347 net/core/dev.c void __dev_remove_pack(struct packet_type *pt) pt 349 net/core/dev.c struct list_head *head = ptype_head(pt); pt 355 net/core/dev.c if (pt == pt1) { pt 356 net/core/dev.c list_del_rcu(&pt->list); pt 361 net/core/dev.c pr_warn("dev_remove_pack: %p not found\n", pt); pt 379 net/core/dev.c void dev_remove_pack(struct packet_type *pt) pt 381 net/core/dev.c __dev_remove_pack(pt); pt 1891 net/core/dev.c struct packet_type **pt, pt 1896 net/core/dev.c struct packet_type *ptype, *pt_prev = *pt; pt 1905 net/core/dev.c *pt = pt_prev; pt 187 net/core/net-procfs.c struct packet_type *pt = NULL; pt 191 net/core/net-procfs.c list_for_each_entry_rcu(pt, &ptype_all, list) { pt 193 net/core/net-procfs.c return pt; pt 198 net/core/net-procfs.c list_for_each_entry_rcu(pt, &ptype_base[t], list) { pt 200 net/core/net-procfs.c return pt; pt 216 net/core/net-procfs.c struct packet_type *pt; pt 224 net/core/net-procfs.c pt = v; pt 225 net/core/net-procfs.c nxt = pt->list.next; pt 226 net/core/net-procfs.c if (pt->type == htons(ETH_P_ALL)) { pt 232 net/core/net-procfs.c hash = ntohs(pt->type) & PTYPE_HASH_MASK; pt 251 net/core/net-procfs.c struct packet_type *pt = v; pt 255 net/core/net-procfs.c else if (pt->dev == NULL || dev_net(pt->dev) == seq_file_net(seq)) { pt 256 net/core/net-procfs.c if (pt->type == htons(ETH_P_ALL)) pt 259 net/core/net-procfs.c seq_printf(seq, "%04x", ntohs(pt->type)); pt 262 net/core/net-procfs.c pt->dev ? pt->dev->name : "", pt->func); pt 246 net/decnet/dn_neigh.c lp->pt = 0; pt 616 net/decnet/dn_route.c int dn_route_rcv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, struct net_device *orig_dev) pt 204 net/dsa/dsa.c struct packet_type *pt, struct net_device *unused) pt 220 net/dsa/dsa.c nskb = cpu_dp->rcv(skb, dev, pt); pt 112 net/dsa/tag_brcm.c struct packet_type *pt, pt 159 net/dsa/tag_brcm.c struct packet_type *pt) pt 164 net/dsa/tag_brcm.c nskb = brcm_tag_rcv_ll(skb, dev, pt, 2); pt 198 net/dsa/tag_brcm.c struct packet_type *pt) pt 201 net/dsa/tag_brcm.c return brcm_tag_rcv_ll(skb, dev, pt, ETH_HLEN); pt 64 net/dsa/tag_dsa.c struct packet_type *pt) pt 77 net/dsa/tag_edsa.c struct packet_type *pt) pt 84 net/dsa/tag_gswip.c struct packet_type *pt) pt 115 net/dsa/tag_ksz.c struct packet_type *pt) pt 183 net/dsa/tag_ksz.c struct packet_type *pt) pt 86 net/dsa/tag_lan9303.c struct packet_type *pt) pt 68 net/dsa/tag_mtk.c struct packet_type *pt) pt 54 net/dsa/tag_qca.c struct packet_type *pt) pt 237 net/dsa/tag_sja1105.c struct packet_type *pt) pt 55 net/dsa/tag_trailer.c struct packet_type *pt) pt 100 net/hsr/hsr_main.c struct hsr_port *hsr_port_get_hsr(struct hsr_priv *hsr, enum hsr_port_type pt) pt 105 net/hsr/hsr_main.c if (port->type == pt) pt 176 net/hsr/hsr_main.h struct hsr_port *hsr_port_get_hsr(struct hsr_priv *hsr, enum hsr_port_type pt); pt 16 net/hsr/hsr_slave.h enum hsr_port_type pt); pt 272 net/ieee802154/6lowpan/rx.c struct packet_type *pt, struct net_device *orig_wdev) pt 1065 net/ieee802154/socket.c struct packet_type *pt, struct net_device *orig_dev) pt 937 net/ipv4/arp.c struct packet_type *pt, struct net_device *orig_dev) pt 514 net/ipv4/ip_input.c int ip_rcv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, pt 584 net/ipv4/ip_input.c 
void ip_list_rcv(struct list_head *head, struct packet_type *pt, pt 454 net/ipv4/ipconfig.c static int ic_rarp_recv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, struct net_device *orig_dev); pt 475 net/ipv4/ipconfig.c ic_rarp_recv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, struct net_device *orig_dev) pt 637 net/ipv4/ipconfig.c static int ic_bootp_recv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, struct net_device *orig_dev); pt 958 net/ipv4/ipconfig.c static int __init ic_bootp_recv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, struct net_device *orig_dev) pt 277 net/ipv6/ip6_input.c int ipv6_rcv(struct sk_buff *skb, struct net_device *dev, struct packet_type *pt, struct net_device *orig_dev) pt 298 net/ipv6/ip6_input.c void ipv6_list_rcv(struct list_head *head, struct packet_type *pt, pt 93 net/iucv/af_iucv.c struct packet_type *pt, struct net_device *orig_dev); pt 2169 net/iucv/af_iucv.c struct packet_type *pt, struct net_device *orig_dev) pt 94 net/llc/llc_core.c struct packet_type *pt, pt 155 net/llc/llc_input.c struct packet_type *pt, struct net_device *orig_dev) pt 199 net/llc/llc_input.c rcv(skb, dev, pt, orig_dev); pt 206 net/llc/llc_input.c rcv(cskb, dev, pt, orig_dev); pt 342 net/mpls/af_mpls.c struct packet_type *pt, struct net_device *orig_dev) pt 380 net/ncsi/internal.h struct packet_type *pt, struct net_device *orig_dev); pt 1106 net/ncsi/ncsi-rsp.c struct packet_type *pt, struct net_device *orig_dev) pt 184 net/packet/af_packet.c struct packet_type *pt, struct net_device *orig_dev); pt 1415 net/packet/af_packet.c struct packet_type *pt, struct net_device *orig_dev) pt 1417 net/packet/af_packet.c struct packet_fanout *f = pt->af_packet_priv; pt 1794 net/packet/af_packet.c struct packet_type *pt, struct net_device *orig_dev) pt 1804 net/packet/af_packet.c sk = pt->af_packet_priv; pt 2045 net/packet/af_packet.c struct packet_type *pt, struct net_device *orig_dev) pt 2058 net/packet/af_packet.c sk = pt->af_packet_priv; pt 2160 net/packet/af_packet.c struct packet_type *pt, struct net_device *orig_dev) pt 2188 net/packet/af_packet.c sk = pt->af_packet_priv; pt 241 net/rose/rose_subr.c unsigned char *pt; pt 311 net/rose/rose_subr.c for (pt = p + 2, lg = 0 ; lg < l ; pt += AX25_ADDR_LEN, lg += AX25_ADDR_LEN) { pt 312 net/rose/rose_subr.c if (pt[6] & AX25_HBIT) { pt 315 net/rose/rose_subr.c memcpy(&facilities->dest_digis[facilities->dest_ndigis++], pt, AX25_ADDR_LEN); pt 319 net/rose/rose_subr.c memcpy(&facilities->source_digis[facilities->source_ndigis++], pt, AX25_ADDR_LEN); pt 62 net/sctp/input.c struct sctp_transport **pt); pt 1031 net/sctp/input.c struct sctp_transport **pt) pt 1041 net/sctp/input.c *pt = t; pt 70 net/tipc/bearer.c struct packet_type *pt, struct net_device *orig_dev); pt 411 net/tipc/bearer.c b->pt.dev = dev; pt 412 net/tipc/bearer.c b->pt.type = htons(ETH_P_TIPC); pt 413 net/tipc/bearer.c b->pt.func = tipc_l2_rcv_msg; pt 414 net/tipc/bearer.c dev_add_pack(&b->pt); pt 434 net/tipc/bearer.c dev_remove_pack(&b->pt); pt 577 net/tipc/bearer.c struct packet_type *pt, struct net_device *orig_dev) pt 587 net/tipc/bearer.c tipc_rcv(dev_net(b->pt.dev), skb, b); pt 711 net/tipc/bearer.c struct packet_type *pt, struct net_device *od) pt 158 net/tipc/bearer.h struct packet_type pt; pt 735 samples/bpf/xdpsock_user.c pthread_t pt; pt 772 samples/bpf/xdpsock_user.c ret = pthread_create(&pt, NULL, poller, NULL); pt 579 security/apparmor/apparmorfs.c static __poll_t 
ns_revision_poll(struct file *file, poll_table *pt) pt 586 security/apparmor/apparmorfs.c poll_wait(file, &rev->ns->wait, pt); pt 1384 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_measure)); pt 1386 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_dont_measure)); pt 1388 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_appraise)); pt 1390 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_dont_appraise)); pt 1392 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_audit)); pt 1394 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_hash)); pt 1396 security/integrity/ima/ima_policy.c seq_puts(m, pt(Opt_dont_hash)); pt 1407 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_mask), mt(mask_exec) + offset); pt 1409 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_mask), mt(mask_write) + offset); pt 1411 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_mask), mt(mask_read) + offset); pt 1413 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_mask), mt(mask_append) + offset); pt 1419 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_fsmagic), tbuf); pt 1425 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_fsname), tbuf); pt 1431 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_pcr), tbuf); pt 1443 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_uid_gt), tbuf); pt 1445 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_uid_lt), tbuf); pt 1447 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_uid_eq), tbuf); pt 1454 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_euid_gt), tbuf); pt 1456 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_euid_lt), tbuf); pt 1458 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_euid_eq), tbuf); pt 1465 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_fowner_gt), tbuf); pt 1467 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_fowner_lt), tbuf); pt 1469 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_fowner_eq), tbuf); pt 1477 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_obj_user), pt 1481 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_obj_role), pt 1485 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_obj_type), pt 1489 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_subj_user), pt 1493 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_subj_role), pt 1497 security/integrity/ima/ima_policy.c seq_printf(m, pt(Opt_subj_type), pt 32 sound/i2c/other/pt2258.c int snd_pt2258_reset(struct snd_pt2258 *pt) pt 39 sound/i2c/other/pt2258.c snd_i2c_lock(pt->i2c_bus); pt 40 sound/i2c/other/pt2258.c if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1) pt 42 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 45 sound/i2c/other/pt2258.c pt->mute = 1; pt 47 sound/i2c/other/pt2258.c snd_i2c_lock(pt->i2c_bus); pt 48 sound/i2c/other/pt2258.c if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1) pt 50 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 54 sound/i2c/other/pt2258.c pt->volume[i] = 0; pt 57 sound/i2c/other/pt2258.c snd_i2c_lock(pt->i2c_bus); pt 58 sound/i2c/other/pt2258.c if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 2) != 2) pt 60 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 65 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 83 sound/i2c/other/pt2258.c struct snd_pt2258 *pt = kcontrol->private_data; pt 87 sound/i2c/other/pt2258.c ucontrol->value.integer.value[0] = 79 - pt->volume[base]; pt 88 sound/i2c/other/pt2258.c ucontrol->value.integer.value[1] = 79 - 
pt->volume[base + 1]; pt 95 sound/i2c/other/pt2258.c struct snd_pt2258 *pt = kcontrol->private_data; pt 104 sound/i2c/other/pt2258.c if (val0 == pt->volume[base] && val1 == pt->volume[base + 1]) pt 107 sound/i2c/other/pt2258.c pt->volume[base] = val0; pt 110 sound/i2c/other/pt2258.c snd_i2c_lock(pt->i2c_bus); pt 111 sound/i2c/other/pt2258.c if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 2) != 2) pt 113 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 115 sound/i2c/other/pt2258.c pt->volume[base + 1] = val1; pt 118 sound/i2c/other/pt2258.c snd_i2c_lock(pt->i2c_bus); pt 119 sound/i2c/other/pt2258.c if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 2) != 2) pt 121 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 126 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 136 sound/i2c/other/pt2258.c struct snd_pt2258 *pt = kcontrol->private_data; pt 138 sound/i2c/other/pt2258.c ucontrol->value.integer.value[0] = !pt->mute; pt 145 sound/i2c/other/pt2258.c struct snd_pt2258 *pt = kcontrol->private_data; pt 150 sound/i2c/other/pt2258.c if (pt->mute == val) pt 153 sound/i2c/other/pt2258.c pt->mute = val; pt 155 sound/i2c/other/pt2258.c snd_i2c_lock(pt->i2c_bus); pt 156 sound/i2c/other/pt2258.c if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1) pt 158 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 163 sound/i2c/other/pt2258.c snd_i2c_unlock(pt->i2c_bus); pt 170 sound/i2c/other/pt2258.c int snd_pt2258_build_controls(struct snd_pt2258 *pt) pt 193 sound/i2c/other/pt2258.c err = snd_ctl_add(pt->card, snd_ctl_new1(&knew, pt)); pt 205 sound/i2c/other/pt2258.c err = snd_ctl_add(pt->card, snd_ctl_new1(&knew, pt)); pt 327 sound/isa/sb/emu8000_pcm.c int pt = calc_pitch_target(rec->pitch); pt 341 sound/isa/sb/emu8000_pcm.c temp = (temp << 8) | (pt << 16) | aux; pt 343 sound/isa/sb/emu8000_pcm.c EMU8000_CPF_WRITE(hw, ch, pt << 16); pt 145 sound/pci/ice1712/revo.c struct snd_pt2258 *pt) pt 168 sound/pci/ice1712/revo.c pt->card = ice->card; pt 169 sound/pci/ice1712/revo.c pt->i2c_bus = ice->i2c; pt 170 sound/pci/ice1712/revo.c pt->i2c_dev = spec->dev; pt 171 sound/pci/ice1712/revo.c spec->pt2258 = pt; pt 173 sound/pci/ice1712/revo.c snd_pt2258_reset(pt); pt 1476 sound/pci/riptide/riptide.c unsigned int i, j, size, pages, f, pt, period; pt 1490 sound/pci/riptide/riptide.c pt = 0; pt 1502 sound/pci/riptide/riptide.c addr = snd_pcm_sgbuf_get_addr(substream, ofs) + pt; pt 1504 sound/pci/riptide/riptide.c pt = (pt + f) % PAGE_SIZE; pt 1505 sound/pci/riptide/riptide.c if (pt == 0) pt 973 sound/usb/pcm.c struct snd_interval *pt = hw_param_interval(params, SNDRV_PCM_HW_PARAM_PERIOD_TIME); pt 1003 sound/usb/pcm.c if (ptime > pt->max || (ptime == pt->max && pt->openmax)) { pt 1004 sound/usb/pcm.c hwc_debug(" > check: ptime %u > max %u\n", ptime, pt->max); pt 1235 sound/usb/pcm.c unsigned int pt, ptmin; pt 1263 sound/usb/pcm.c pt = 125 * (1 << fp->datainterval); pt 1264 sound/usb/pcm.c ptmin = min(ptmin, pt); pt 47 tools/lib/bpf/nlattr.c struct libbpf_nla_policy *pt; pt 54 tools/lib/bpf/nlattr.c pt = &policy[type]; pt 56 tools/lib/bpf/nlattr.c if (pt->type > LIBBPF_NLA_TYPE_MAX) pt 59 tools/lib/bpf/nlattr.c if (pt->minlen) pt 60 tools/lib/bpf/nlattr.c minlen = pt->minlen; pt 61 tools/lib/bpf/nlattr.c else if (pt->type != LIBBPF_NLA_UNSPEC) pt 62 tools/lib/bpf/nlattr.c minlen = nla_attr_minlen[pt->type]; pt 67 tools/lib/bpf/nlattr.c if (pt->maxlen && libbpf_nla_len(nla) > pt->maxlen) pt 70 tools/lib/bpf/nlattr.c if (pt->type == LIBBPF_NLA_STRING) { pt 26 tools/perf/tests/mmap-thread-lookup.c pthread_t pt; pt 
87 tools/perf/tests/mmap-thread-lookup.c err = pthread_create(&td->pt, NULL, thread_fn, td); pt 127 tools/perf/tests/mmap-thread-lookup.c err = pthread_join(threads[i].pt, NULL); pt 138 tools/perf/util/intel-pt.c struct intel_pt *pt; pt 179 tools/perf/util/intel-pt.c static void intel_pt_dump(struct intel_pt *pt __maybe_unused, pt 219 tools/perf/util/intel-pt.c static void intel_pt_dump_event(struct intel_pt *pt, unsigned char *buf, pt 223 tools/perf/util/intel-pt.c intel_pt_dump(pt, buf, len); pt 236 tools/perf/util/intel-pt.c static int intel_pt_do_fix_overlap(struct intel_pt *pt, struct auxtrace_buffer *a, pt 243 tools/perf/util/intel-pt.c pt->have_tsc, &consecutive); pt 261 tools/perf/util/intel-pt.c int fd = perf_data__fd(ptq->pt->session->data); pt 268 tools/perf/util/intel-pt.c might_overlap = ptq->pt->snapshot_mode || ptq->pt->sampling_mode; pt 270 tools/perf/util/intel-pt.c intel_pt_do_fix_overlap(ptq->pt, old_buffer, buffer)) pt 312 tools/perf/util/intel-pt.c queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; pt 362 tools/perf/util/intel-pt.c queue = &ptq->pt->queues.queue_array[ptq->queue_nr]; pt 507 tools/perf/util/intel-pt.c static inline u8 intel_pt_cpumode(struct intel_pt *pt, uint64_t ip) pt 509 tools/perf/util/intel-pt.c return ip >= pt->kernel_start ? pt 520 tools/perf/util/intel-pt.c struct machine *machine = ptq->pt->machine; pt 536 tools/perf/util/intel-pt.c cpumode = intel_pt_cpumode(ptq->pt, *ip); pt 542 tools/perf/util/intel-pt.c thread = ptq->pt->unknown_thread; pt 644 tools/perf/util/intel-pt.c static bool intel_pt_match_pgd_ip(struct intel_pt *pt, uint64_t ip, pt 652 tools/perf/util/intel-pt.c list_for_each_entry(filt, &pt->filts.head, list) { pt 690 tools/perf/util/intel-pt.c if (ip >= ptq->pt->kernel_start) pt 691 tools/perf/util/intel-pt.c return intel_pt_match_pgd_ip(ptq->pt, ip, ip, NULL); pt 704 tools/perf/util/intel-pt.c return intel_pt_match_pgd_ip(ptq->pt, ip, offset, pt 713 tools/perf/util/intel-pt.c static bool intel_pt_get_config(struct intel_pt *pt, pt 716 tools/perf/util/intel-pt.c if (attr->type == pt->pmu_type) { pt 725 tools/perf/util/intel-pt.c static bool intel_pt_exclude_kernel(struct intel_pt *pt) pt 729 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 730 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, NULL) && pt 737 tools/perf/util/intel-pt.c static bool intel_pt_return_compression(struct intel_pt *pt) pt 742 tools/perf/util/intel-pt.c if (!pt->noretcomp_bit) pt 745 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 746 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, &config) && pt 747 tools/perf/util/intel-pt.c (config & pt->noretcomp_bit)) pt 753 tools/perf/util/intel-pt.c static bool intel_pt_branch_enable(struct intel_pt *pt) pt 758 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 759 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, &config) && pt 766 tools/perf/util/intel-pt.c static unsigned int intel_pt_mtc_period(struct intel_pt *pt) pt 772 tools/perf/util/intel-pt.c if (!pt->mtc_freq_bits) pt 775 tools/perf/util/intel-pt.c for (shift = 0, config = pt->mtc_freq_bits; !(config & 1); shift++) pt 778 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 779 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, &config)) pt 780 tools/perf/util/intel-pt.c return (config & pt->mtc_freq_bits) >> shift; pt 785 
tools/perf/util/intel-pt.c static bool intel_pt_timeless_decoding(struct intel_pt *pt) pt 791 tools/perf/util/intel-pt.c if (!pt->tsc_bit || !pt->cap_user_time_zero) pt 794 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 797 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, &config)) { pt 798 tools/perf/util/intel-pt.c if (config & pt->tsc_bit) pt 807 tools/perf/util/intel-pt.c static bool intel_pt_tracing_kernel(struct intel_pt *pt) pt 811 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 812 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, NULL) && pt 819 tools/perf/util/intel-pt.c static bool intel_pt_have_tsc(struct intel_pt *pt) pt 825 tools/perf/util/intel-pt.c if (!pt->tsc_bit) pt 828 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) { pt 829 tools/perf/util/intel-pt.c if (intel_pt_get_config(pt, &evsel->core.attr, &config)) { pt 830 tools/perf/util/intel-pt.c if (config & pt->tsc_bit) pt 839 tools/perf/util/intel-pt.c static u64 intel_pt_ns_to_ticks(const struct intel_pt *pt, u64 ns) pt 843 tools/perf/util/intel-pt.c quot = ns / pt->tc.time_mult; pt 844 tools/perf/util/intel-pt.c rem = ns % pt->tc.time_mult; pt 845 tools/perf/util/intel-pt.c return (quot << pt->tc.time_shift) + (rem << pt->tc.time_shift) / pt 846 tools/perf/util/intel-pt.c pt->tc.time_mult; pt 849 tools/perf/util/intel-pt.c static struct intel_pt_queue *intel_pt_alloc_queue(struct intel_pt *pt, pt 853 tools/perf/util/intel-pt.c struct perf_env *env = pt->machine->env; pt 860 tools/perf/util/intel-pt.c if (pt->synth_opts.callchain) { pt 864 tools/perf/util/intel-pt.c sz += (pt->synth_opts.callchain_sz + 1) * sizeof(u64); pt 870 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch) { pt 873 tools/perf/util/intel-pt.c sz += pt->synth_opts.last_branch_sz * pt 887 tools/perf/util/intel-pt.c ptq->pt = pt; pt 889 tools/perf/util/intel-pt.c ptq->exclude_kernel = intel_pt_exclude_kernel(pt); pt 899 tools/perf/util/intel-pt.c params.return_compression = intel_pt_return_compression(pt); pt 900 tools/perf/util/intel-pt.c params.branch_enable = intel_pt_branch_enable(pt); pt 901 tools/perf/util/intel-pt.c params.max_non_turbo_ratio = pt->max_non_turbo_ratio; pt 902 tools/perf/util/intel-pt.c params.mtc_period = intel_pt_mtc_period(pt); pt 903 tools/perf/util/intel-pt.c params.tsc_ctc_ratio_n = pt->tsc_ctc_ratio_n; pt 904 tools/perf/util/intel-pt.c params.tsc_ctc_ratio_d = pt->tsc_ctc_ratio_d; pt 906 tools/perf/util/intel-pt.c if (pt->filts.cnt > 0) pt 909 tools/perf/util/intel-pt.c if (pt->synth_opts.instructions) { pt 910 tools/perf/util/intel-pt.c if (pt->synth_opts.period) { pt 911 tools/perf/util/intel-pt.c switch (pt->synth_opts.period_type) { pt 915 tools/perf/util/intel-pt.c params.period = pt->synth_opts.period; pt 919 tools/perf/util/intel-pt.c params.period = pt->synth_opts.period; pt 923 tools/perf/util/intel-pt.c params.period = intel_pt_ns_to_ticks(pt, pt 924 tools/perf/util/intel-pt.c pt->synth_opts.period); pt 970 tools/perf/util/intel-pt.c static void intel_pt_set_pid_tid_cpu(struct intel_pt *pt, pt 975 tools/perf/util/intel-pt.c if (queue->tid == -1 || pt->have_sched_switch) { pt 976 tools/perf/util/intel-pt.c ptq->tid = machine__get_current_tid(pt->machine, ptq->cpu); pt 981 tools/perf/util/intel-pt.c ptq->thread = machine__find_thread(pt->machine, -1, ptq->tid); pt 1021 tools/perf/util/intel-pt.c static void intel_pt_setup_time_range(struct intel_pt *pt, pt 1024 
pt 849 tools/perf/util/intel-pt.c static struct intel_pt_queue *intel_pt_alloc_queue(struct intel_pt *pt,
pt 853 tools/perf/util/intel-pt.c struct perf_env *env = pt->machine->env;
pt 860 tools/perf/util/intel-pt.c if (pt->synth_opts.callchain) {
pt 864 tools/perf/util/intel-pt.c sz += (pt->synth_opts.callchain_sz + 1) * sizeof(u64);
pt 870 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch) {
pt 873 tools/perf/util/intel-pt.c sz += pt->synth_opts.last_branch_sz *
pt 887 tools/perf/util/intel-pt.c ptq->pt = pt;
pt 889 tools/perf/util/intel-pt.c ptq->exclude_kernel = intel_pt_exclude_kernel(pt);
pt 899 tools/perf/util/intel-pt.c params.return_compression = intel_pt_return_compression(pt);
pt 900 tools/perf/util/intel-pt.c params.branch_enable = intel_pt_branch_enable(pt);
pt 901 tools/perf/util/intel-pt.c params.max_non_turbo_ratio = pt->max_non_turbo_ratio;
pt 902 tools/perf/util/intel-pt.c params.mtc_period = intel_pt_mtc_period(pt);
pt 903 tools/perf/util/intel-pt.c params.tsc_ctc_ratio_n = pt->tsc_ctc_ratio_n;
pt 904 tools/perf/util/intel-pt.c params.tsc_ctc_ratio_d = pt->tsc_ctc_ratio_d;
pt 906 tools/perf/util/intel-pt.c if (pt->filts.cnt > 0)
pt 909 tools/perf/util/intel-pt.c if (pt->synth_opts.instructions) {
pt 910 tools/perf/util/intel-pt.c if (pt->synth_opts.period) {
pt 911 tools/perf/util/intel-pt.c switch (pt->synth_opts.period_type) {
pt 915 tools/perf/util/intel-pt.c params.period = pt->synth_opts.period;
pt 919 tools/perf/util/intel-pt.c params.period = pt->synth_opts.period;
pt 923 tools/perf/util/intel-pt.c params.period = intel_pt_ns_to_ticks(pt,
pt 924 tools/perf/util/intel-pt.c pt->synth_opts.period);
pt 970 tools/perf/util/intel-pt.c static void intel_pt_set_pid_tid_cpu(struct intel_pt *pt,
pt 975 tools/perf/util/intel-pt.c if (queue->tid == -1 || pt->have_sched_switch) {
pt 976 tools/perf/util/intel-pt.c ptq->tid = machine__get_current_tid(pt->machine, ptq->cpu);
pt 981 tools/perf/util/intel-pt.c ptq->thread = machine__find_thread(pt->machine, -1, ptq->tid);
pt 1021 tools/perf/util/intel-pt.c static void intel_pt_setup_time_range(struct intel_pt *pt,
pt 1024 tools/perf/util/intel-pt.c if (!pt->range_cnt)
pt 1027 tools/perf/util/intel-pt.c ptq->sel_timestamp = pt->time_ranges[0].start;
pt 1033 tools/perf/util/intel-pt.c ptq->sel_timestamp = pt->time_ranges[0].end;
pt 1038 tools/perf/util/intel-pt.c static int intel_pt_setup_queue(struct intel_pt *pt,
pt 1048 tools/perf/util/intel-pt.c ptq = intel_pt_alloc_queue(pt, queue_nr);
pt 1059 tools/perf/util/intel-pt.c if (pt->sampling_mode && !pt->snapshot_mode &&
pt 1060 tools/perf/util/intel-pt.c pt->timeless_decoding)
pt 1063 tools/perf/util/intel-pt.c ptq->sync_switch = pt->sync_switch;
pt 1065 tools/perf/util/intel-pt.c intel_pt_setup_time_range(pt, ptq);
pt 1074 tools/perf/util/intel-pt.c if (pt->timeless_decoding)
pt 1111 tools/perf/util/intel-pt.c ret = auxtrace_heap__add(&pt->heap, queue_nr, ptq->timestamp);
pt 1120 tools/perf/util/intel-pt.c static int intel_pt_setup_queues(struct intel_pt *pt)
pt 1125 tools/perf/util/intel-pt.c for (i = 0; i < pt->queues.nr_queues; i++) {
pt 1126 tools/perf/util/intel-pt.c ret = intel_pt_setup_queue(pt, &pt->queues.queue_array[i], i);
pt 1144 tools/perf/util/intel-pt.c nr = ptq->pt->synth_opts.last_branch_sz - ptq->last_branch_pos;
pt 1149 tools/perf/util/intel-pt.c if (bs_src->nr >= ptq->pt->synth_opts.last_branch_sz) {
pt 1169 tools/perf/util/intel-pt.c ptq->last_branch_pos = ptq->pt->synth_opts.last_branch_sz;
pt 1179 tools/perf/util/intel-pt.c be->flags.mispred = ptq->pt->mispred_all;
pt 1181 tools/perf/util/intel-pt.c if (bs->nr < ptq->pt->synth_opts.last_branch_sz)
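The ring-buffer arithmetic visible around pt 1144-1181 suggests how the decoder keeps the most recent last_branch_sz branches: new entries are written at a descending wrap-around position, and the ring is flattened newest-first in at most two copies when a sample is emitted. A simplified, self-contained sketch of that scheme (types and names are stand-ins, not perf's):

    #include <stdint.h>
    #include <string.h>

    #define LAST_BRANCH_SZ 4 /* the kernel uses pt->synth_opts.last_branch_sz */

    /* Simplified stand-ins for perf's branch entry/stack types. */
    struct branch_entry { uint64_t from, to; };

    struct branch_ring {
        struct branch_entry ent[LAST_BRANCH_SZ];
        unsigned int pos;   /* index of the most recent entry */
        unsigned int nr;    /* number of valid entries */
    };

    /* Record a branch, overwriting the oldest entry once full -- the same
     * descending wrap-around that ptq->last_branch_pos implements. */
    static void ring_add(struct branch_ring *r, uint64_t from, uint64_t to)
    {
        if (!r->pos)
            r->pos = LAST_BRANCH_SZ;
        r->pos--;
        r->ent[r->pos].from = from;
        r->ent[r->pos].to = to;
        if (r->nr < LAST_BRANCH_SZ)
            r->nr++;
    }

    /* Flatten newest-first into dst: first the chunk from pos to the end
     * of the array, then (only once the ring has wrapped) the chunk from
     * the start -- mirroring the nr = sz - pos copy at pt 1144-1149. */
    static unsigned int ring_copy(const struct branch_ring *r,
                                  struct branch_entry *dst)
    {
        unsigned int n = LAST_BRANCH_SZ - r->pos;

        if (n > r->nr)
            n = r->nr;
        memcpy(dst, &r->ent[r->pos], n * sizeof(*dst));
        if (r->nr > n)
            memcpy(dst + n, r->ent, (r->nr - n) * sizeof(*dst));
        return r->nr;
    }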
pt 1185 tools/perf/util/intel-pt.c static inline bool intel_pt_skip_event(struct intel_pt *pt)
pt 1187 tools/perf/util/intel-pt.c return pt->synth_opts.initial_skip &&
pt 1188 tools/perf/util/intel-pt.c pt->num_events++ < pt->synth_opts.initial_skip;
pt 1196 tools/perf/util/intel-pt.c static inline bool intel_pt_skip_cbr_event(struct intel_pt *pt)
pt 1198 tools/perf/util/intel-pt.c return pt->synth_opts.initial_skip &&
pt 1199 tools/perf/util/intel-pt.c pt->num_events + 4 < pt->synth_opts.initial_skip;
pt 1216 tools/perf/util/intel-pt.c static void intel_pt_prep_b_sample(struct intel_pt *pt,
pt 1223 tools/perf/util/intel-pt.c if (!pt->timeless_decoding)
pt 1224 tools/perf/util/intel-pt.c sample->time = tsc_to_perf_time(ptq->timestamp, &pt->tc);
pt 1227 tools/perf/util/intel-pt.c sample->cpumode = intel_pt_cpumode(pt, sample->ip);
pt 1242 tools/perf/util/intel-pt.c static inline int intel_pt_opt_inject(struct intel_pt *pt,
pt 1246 tools/perf/util/intel-pt.c if (!pt->synth_opts.inject)
pt 1252 tools/perf/util/intel-pt.c static int intel_pt_deliver_synth_b_event(struct intel_pt *pt,
pt 1258 tools/perf/util/intel-pt.c ret = intel_pt_opt_inject(pt, event, sample, type);
pt 1262 tools/perf/util/intel-pt.c ret = perf_session__deliver_synth_event(pt->session, event, sample);
pt 1271 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1279 tools/perf/util/intel-pt.c if (pt->branches_filter && !(pt->branches_filter & ptq->flags))
pt 1282 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1285 tools/perf/util/intel-pt.c intel_pt_prep_b_sample(pt, ptq, event, &sample);
pt 1287 tools/perf/util/intel-pt.c sample.id = ptq->pt->branches_id;
pt 1288 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->branches_id;
pt 1294 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch && sort__mode == SORT_MODE__BRANCH) {
pt 1312 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_b_event(pt, event, &sample,
pt 1313 tools/perf/util/intel-pt.c pt->branches_sample_type);
pt 1316 tools/perf/util/intel-pt.c static void intel_pt_prep_sample(struct intel_pt *pt,
pt 1321 tools/perf/util/intel-pt.c intel_pt_prep_b_sample(pt, ptq, event, sample);
pt 1323 tools/perf/util/intel-pt.c if (pt->synth_opts.callchain) {
pt 1325 tools/perf/util/intel-pt.c pt->synth_opts.callchain_sz + 1,
pt 1326 tools/perf/util/intel-pt.c sample->ip, pt->kernel_start);
pt 1330 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch) {
pt 1336 tools/perf/util/intel-pt.c static inline int intel_pt_deliver_synth_event(struct intel_pt *pt,
pt 1344 tools/perf/util/intel-pt.c ret = intel_pt_deliver_synth_b_event(pt, event, sample, type);
pt 1346 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch)
pt 1354 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1358 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1361 tools/perf/util/intel-pt.c intel_pt_prep_sample(pt, ptq, event, &sample);
pt 1363 tools/perf/util/intel-pt.c sample.id = ptq->pt->instructions_id;
pt 1364 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->instructions_id;
pt 1376 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1377 tools/perf/util/intel-pt.c pt->instructions_sample_type);
pt 1382 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1386 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1389 tools/perf/util/intel-pt.c intel_pt_prep_sample(pt, ptq, event, &sample);
pt 1391 tools/perf/util/intel-pt.c sample.id = ptq->pt->transactions_id;
pt 1392 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->transactions_id;
pt 1394 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1395 tools/perf/util/intel-pt.c pt->transactions_sample_type);
pt 1398 tools/perf/util/intel-pt.c static void intel_pt_prep_p_sample(struct intel_pt *pt,
pt 1403 tools/perf/util/intel-pt.c intel_pt_prep_sample(pt, ptq, event, sample);
pt 1415 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1420 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1423 tools/perf/util/intel-pt.c intel_pt_prep_p_sample(pt, ptq, event, &sample);
pt 1425 tools/perf/util/intel-pt.c sample.id = ptq->pt->ptwrites_id;
pt 1426 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->ptwrites_id;
pt 1435 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1436 tools/perf/util/intel-pt.c pt->ptwrites_sample_type);
pt 1441 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1447 tools/perf/util/intel-pt.c if (intel_pt_skip_cbr_event(pt))
pt 1452 tools/perf/util/intel-pt.c intel_pt_prep_p_sample(pt, ptq, event, &sample);
pt 1454 tools/perf/util/intel-pt.c sample.id = ptq->pt->cbr_id;
pt 1455 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->cbr_id;
pt 1457 tools/perf/util/intel-pt.c flags = (u16)ptq->state->cbr_payload | (pt->max_non_turbo_ratio << 16);
pt 1459 tools/perf/util/intel-pt.c raw.freq = cpu_to_le32(raw.cbr * pt->cbr2khz);
pt 1465 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1466 tools/perf/util/intel-pt.c pt->pwr_events_sample_type);
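The CBR (core-to-bus ratio) sample entries (pt 1457-1459), together with the cbr2khz initialization further down (pt 3257: cbr2khz = tsc_freq / max_non_turbo_ratio / 1000), imply a simple frequency computation: a kHz-per-ratio-step factor is derived once, and each CBR packet payload is multiplied by it. A worked example with invented numbers:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* Illustrative values, not from real hardware. */
        uint64_t tsc_freq = 2112000000ULL;  /* ticks/second, e.g. intel_pt_ns_to_ticks(pt, 1000000000) */
        uint32_t max_non_turbo_ratio = 22;  /* TSC frequency = bus clock * this ratio */
        uint32_t cbr2khz = tsc_freq / max_non_turbo_ratio / 1000; /* 96000 kHz per ratio step */
        uint32_t cbr = 34;                  /* example CBR packet payload (turbo) */

        /* 34 * 96000 kHz = 3264000 kHz, i.e. ~3.264 GHz effective clock */
        printf("effective frequency: %u kHz\n", cbr * cbr2khz);
        return 0;
    }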
pt 1471 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1476 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1479 tools/perf/util/intel-pt.c intel_pt_prep_p_sample(pt, ptq, event, &sample);
pt 1481 tools/perf/util/intel-pt.c sample.id = ptq->pt->mwait_id;
pt 1482 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->mwait_id;
pt 1490 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1491 tools/perf/util/intel-pt.c pt->pwr_events_sample_type);
pt 1496 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1501 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1504 tools/perf/util/intel-pt.c intel_pt_prep_p_sample(pt, ptq, event, &sample);
pt 1506 tools/perf/util/intel-pt.c sample.id = ptq->pt->pwre_id;
pt 1507 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->pwre_id;
pt 1515 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1516 tools/perf/util/intel-pt.c pt->pwr_events_sample_type);
pt 1521 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1526 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1529 tools/perf/util/intel-pt.c intel_pt_prep_p_sample(pt, ptq, event, &sample);
pt 1531 tools/perf/util/intel-pt.c sample.id = ptq->pt->exstop_id;
pt 1532 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->exstop_id;
pt 1540 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1541 tools/perf/util/intel-pt.c pt->pwr_events_sample_type);
pt 1546 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1551 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1554 tools/perf/util/intel-pt.c intel_pt_prep_p_sample(pt, ptq, event, &sample);
pt 1556 tools/perf/util/intel-pt.c sample.id = ptq->pt->pwrx_id;
pt 1557 tools/perf/util/intel-pt.c sample.stream_id = ptq->pt->pwrx_id;
pt 1565 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample,
pt 1566 tools/perf/util/intel-pt.c pt->pwr_events_sample_type);
pt 1705 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1706 tools/perf/util/intel-pt.c struct evsel *evsel = pt->pebs_evsel;
pt 1711 tools/perf/util/intel-pt.c if (intel_pt_skip_event(pt))
pt 1731 tools/perf/util/intel-pt.c cpumode = sample.ip < ptq->pt->kernel_start ?
pt 1744 tools/perf/util/intel-pt.c else if (!pt->timeless_decoding)
pt 1747 tools/perf/util/intel-pt.c sample.time = tsc_to_perf_time(timestamp, &pt->tc);
pt 1751 tools/perf/util/intel-pt.c pt->synth_opts.callchain) {
pt 1753 tools/perf/util/intel-pt.c pt->synth_opts.callchain_sz, sample.ip,
pt 1754 tools/perf/util/intel-pt.c pt->kernel_start);
pt 1785 tools/perf/util/intel-pt.c } else if (pt->synth_opts.last_branch) {
pt 1821 tools/perf/util/intel-pt.c return intel_pt_deliver_synth_event(pt, ptq, event, &sample, sample_type);
pt 1824 tools/perf/util/intel-pt.c static int intel_pt_synth_error(struct intel_pt *pt, int code, int cpu,
pt 1836 tools/perf/util/intel-pt.c err = perf_session__deliver_synth_event(pt->session, &event, NULL);
pt 1847 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1850 tools/perf/util/intel-pt.c tm = pt->timeless_decoding ? 0 : tsc_to_perf_time(tm, &pt->tc);
pt 1852 tools/perf/util/intel-pt.c return intel_pt_synth_error(pt, state->err, ptq->cpu, ptq->pid,
pt 1856 tools/perf/util/intel-pt.c static int intel_pt_next_tid(struct intel_pt *pt, struct intel_pt_queue *ptq)
pt 1867 tools/perf/util/intel-pt.c err = machine__set_current_tid(pt->machine, ptq->cpu, -1, tid);
pt 1869 tools/perf/util/intel-pt.c queue = &pt->queues.queue_array[ptq->queue_nr];
pt 1870 tools/perf/util/intel-pt.c intel_pt_set_pid_tid_cpu(pt, queue);
pt 1879 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1881 tools/perf/util/intel-pt.c return ip == pt->switch_ip &&
pt 1893 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 1914 tools/perf/util/intel-pt.c if (pt->sample_pebs && state->type & INTEL_PT_BLK_ITEMS) {
pt 1920 tools/perf/util/intel-pt.c if (pt->sample_pwr_events) {
pt 1950 tools/perf/util/intel-pt.c if (pt->sample_instructions && (state->type & INTEL_PT_INSTRUCTION)) {
pt 1956 tools/perf/util/intel-pt.c if (pt->sample_transactions && (state->type & INTEL_PT_TRANSACTION)) {
pt 1962 tools/perf/util/intel-pt.c if (pt->sample_ptwrites && (state->type & INTEL_PT_PTW)) {
pt 1971 tools/perf/util/intel-pt.c if (pt->synth_opts.callchain || pt->synth_opts.thread_stack)
pt 1978 tools/perf/util/intel-pt.c if (pt->sample_branches) {
pt 1984 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch)
pt 1995 tools/perf/util/intel-pt.c err = intel_pt_next_tid(pt, ptq);
pt 2009 tools/perf/util/intel-pt.c state->to_ip == pt->ptss_ip &&
pt 2017 tools/perf/util/intel-pt.c static u64 intel_pt_switch_ip(struct intel_pt *pt, u64 *ptss_ip)
pt 2019 tools/perf/util/intel-pt.c struct machine *machine = pt->machine;
pt 2051 tools/perf/util/intel-pt.c if (pt->have_sched_switch == 1)
pt 2069 tools/perf/util/intel-pt.c static void intel_pt_enable_sync_switch(struct intel_pt *pt)
pt 2073 tools/perf/util/intel-pt.c pt->sync_switch = true;
pt 2075 tools/perf/util/intel-pt.c for (i = 0; i < pt->queues.nr_queues; i++) {
pt 2076 tools/perf/util/intel-pt.c struct auxtrace_queue *queue = &pt->queues.queue_array[i];
pt 2090 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 2095 tools/perf/util/intel-pt.c ptq->sel_timestamp = pt->time_ranges[ptq->sel_idx].end;
pt 2097 tools/perf/util/intel-pt.c } else if (ptq->sel_idx + 1 < pt->range_cnt) {
pt 2101 tools/perf/util/intel-pt.c ptq->sel_timestamp = pt->time_ranges[ptq->sel_idx].start;
pt 2129 tools/perf/util/intel-pt.c intel_pt_next_tid(ptq->pt, ptq);
pt 2159 tools/perf/util/intel-pt.c struct intel_pt *pt = ptq->pt;
pt 2163 tools/perf/util/intel-pt.c if (!pt->kernel_start) {
pt 2164 tools/perf/util/intel-pt.c pt->kernel_start = machine__kernel_start(pt->machine);
pt 2165 tools/perf/util/intel-pt.c if (pt->per_cpu_mmaps &&
pt 2166 tools/perf/util/intel-pt.c (pt->have_sched_switch == 1 || pt->have_sched_switch == 3) &&
pt 2167 tools/perf/util/intel-pt.c !pt->timeless_decoding && intel_pt_tracing_kernel(pt) &&
pt 2168 tools/perf/util/intel-pt.c !pt->sampling_mode) {
pt 2169 tools/perf/util/intel-pt.c pt->switch_ip = intel_pt_switch_ip(pt, &pt->ptss_ip);
pt 2170 tools/perf/util/intel-pt.c if (pt->switch_ip) {
pt 2172 tools/perf/util/intel-pt.c pt->switch_ip, pt->ptss_ip);
pt 2173 tools/perf/util/intel-pt.c intel_pt_enable_sync_switch(pt);
pt 2190 tools/perf/util/intel-pt.c state->from_ip >= pt->kernel_start) {
pt 2192 tools/perf/util/intel-pt.c intel_pt_next_tid(pt, ptq);
pt 2194 tools/perf/util/intel-pt.c if (pt->synth_opts.errors) {
pt 2207 tools/perf/util/intel-pt.c if (pt->est_tsc &&
pt 2208 tools/perf/util/intel-pt.c (state->from_ip >= pt->kernel_start || !state->from_ip) &&
pt 2209 tools/perf/util/intel-pt.c state->to_ip && state->to_ip < pt->kernel_start) {
pt 2231 tools/perf/util/intel-pt.c if (!pt->timeless_decoding && ptq->timestamp >= *timestamp) {
pt 2239 tools/perf/util/intel-pt.c static inline int intel_pt_update_queues(struct intel_pt *pt)
pt 2241 tools/perf/util/intel-pt.c if (pt->queues.new_data) {
pt 2242 tools/perf/util/intel-pt.c pt->queues.new_data = false;
pt 2243 tools/perf/util/intel-pt.c return intel_pt_setup_queues(pt);
pt 2248 tools/perf/util/intel-pt.c static int intel_pt_process_queues(struct intel_pt *pt, u64 timestamp)
pt 2258 tools/perf/util/intel-pt.c if (!pt->heap.heap_cnt)
pt 2261 tools/perf/util/intel-pt.c if (pt->heap.heap_array[0].ordinal >= timestamp)
pt 2264 tools/perf/util/intel-pt.c queue_nr = pt->heap.heap_array[0].queue_nr;
pt 2265 tools/perf/util/intel-pt.c queue = &pt->queues.queue_array[queue_nr];
pt 2269 tools/perf/util/intel-pt.c queue_nr, pt->heap.heap_array[0].ordinal,
pt 2272 tools/perf/util/intel-pt.c auxtrace_heap__pop(&pt->heap);
pt 2274 tools/perf/util/intel-pt.c if (pt->heap.heap_cnt) {
pt 2275 tools/perf/util/intel-pt.c ts = pt->heap.heap_array[0].ordinal + 1;
pt 2282 tools/perf/util/intel-pt.c intel_pt_set_pid_tid_cpu(pt, queue);
pt 2287 tools/perf/util/intel-pt.c auxtrace_heap__add(&pt->heap, queue_nr, ts);
pt 2292 tools/perf/util/intel-pt.c ret = auxtrace_heap__add(&pt->heap, queue_nr, ts);
pt 2303 tools/perf/util/intel-pt.c static int intel_pt_process_timeless_queues(struct intel_pt *pt, pid_t tid,
pt 2306 tools/perf/util/intel-pt.c struct auxtrace_queues *queues = &pt->queues;
pt 2311 tools/perf/util/intel-pt.c struct auxtrace_queue *queue = &pt->queues.queue_array[i];
pt 2316 tools/perf/util/intel-pt.c intel_pt_set_pid_tid_cpu(pt, queue);
pt 2323 tools/perf/util/intel-pt.c static int intel_pt_lost(struct intel_pt *pt, struct perf_sample *sample)
pt 2325 tools/perf/util/intel-pt.c return intel_pt_synth_error(pt, INTEL_PT_ERR_LOST, sample->cpu,
pt 2329 tools/perf/util/intel-pt.c static struct intel_pt_queue *intel_pt_cpu_to_ptq(struct intel_pt *pt, int cpu)
pt 2333 tools/perf/util/intel-pt.c if (cpu < 0 || !pt->queues.nr_queues)
pt 2336 tools/perf/util/intel-pt.c if ((unsigned)cpu >= pt->queues.nr_queues)
pt 2337 tools/perf/util/intel-pt.c i = pt->queues.nr_queues - 1;
pt 2341 tools/perf/util/intel-pt.c if (pt->queues.queue_array[i].cpu == cpu)
pt 2342 tools/perf/util/intel-pt.c return pt->queues.queue_array[i].priv;
pt 2345 tools/perf/util/intel-pt.c if (pt->queues.queue_array[--i].cpu == cpu)
pt 2346 tools/perf/util/intel-pt.c return pt->queues.queue_array[i].priv;
pt 2349 tools/perf/util/intel-pt.c for (; j < pt->queues.nr_queues; j++) {
pt 2350 tools/perf/util/intel-pt.c if (pt->queues.queue_array[j].cpu == cpu)
pt 2351 tools/perf/util/intel-pt.c return pt->queues.queue_array[j].priv;
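The intel_pt_cpu_to_ptq() entries just above (pt 2329-2351) show a lookup heuristic: queues are normally allocated one per CPU in CPU order, so the index equal to the CPU number is probed first, then the array is scanned backwards from that point, then forwards over the rest. A stand-alone rendering of that search with simplified types:

    #include <stddef.h>

    /* Simplified stand-ins: one decode queue per traced CPU, usually
     * allocated in CPU order, so queue_array[cpu] is the likely match. */
    struct queue { int cpu; void *priv; };
    struct queues { struct queue *queue_array; unsigned int nr_queues; };

    static void *cpu_to_queue_priv(struct queues *q, int cpu)
    {
        unsigned int i, j;

        if (cpu < 0 || !q->nr_queues)
            return NULL;

        if ((unsigned int)cpu >= q->nr_queues)
            i = q->nr_queues - 1;
        else
            i = cpu;

        /* direct probe: per-CPU allocation order makes this the common hit */
        if (q->queue_array[i].cpu == cpu)
            return q->queue_array[i].priv;

        for (j = 0; i > 0; j++) {       /* backwards from the probe point */
            if (q->queue_array[--i].cpu == cpu)
                return q->queue_array[i].priv;
        }

        for (; j < q->nr_queues; j++) { /* then forwards over the rest */
            if (q->queue_array[j].cpu == cpu)
                return q->queue_array[j].priv;
        }

        return NULL;
    }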
pt 2357 tools/perf/util/intel-pt.c static int intel_pt_sync_switch(struct intel_pt *pt, int cpu, pid_t tid,
pt 2363 tools/perf/util/intel-pt.c if (!pt->sync_switch)
pt 2366 tools/perf/util/intel-pt.c ptq = intel_pt_cpu_to_ptq(pt, cpu);
pt 2381 tools/perf/util/intel-pt.c &pt->tc);
pt 2382 tools/perf/util/intel-pt.c err = auxtrace_heap__add(&pt->heap, ptq->queue_nr,
pt 2402 tools/perf/util/intel-pt.c static int intel_pt_process_switch(struct intel_pt *pt,
pt 2409 tools/perf/util/intel-pt.c evsel = perf_evlist__id2evsel(pt->session->evlist, sample->id);
pt 2410 tools/perf/util/intel-pt.c if (evsel != pt->switch_evsel)
pt 2418 tools/perf/util/intel-pt.c &pt->tc));
pt 2420 tools/perf/util/intel-pt.c ret = intel_pt_sync_switch(pt, cpu, tid, sample->time);
pt 2424 tools/perf/util/intel-pt.c return machine__set_current_tid(pt->machine, cpu, -1, tid);
pt 2427 tools/perf/util/intel-pt.c static int intel_pt_context_switch_in(struct intel_pt *pt,
pt 2434 tools/perf/util/intel-pt.c if (pt->sync_switch) {
pt 2437 tools/perf/util/intel-pt.c ptq = intel_pt_cpu_to_ptq(pt, cpu);
pt 2459 tools/perf/util/intel-pt.c if (machine__get_current_tid(pt->machine, cpu) == tid)
pt 2462 tools/perf/util/intel-pt.c return machine__set_current_tid(pt->machine, cpu, pid, tid);
pt 2465 tools/perf/util/intel-pt.c static int intel_pt_context_switch(struct intel_pt *pt, union perf_event *event,
pt 2474 tools/perf/util/intel-pt.c if (pt->have_sched_switch == 3) {
pt 2476 tools/perf/util/intel-pt.c return intel_pt_context_switch_in(pt, sample);
pt 2497 tools/perf/util/intel-pt.c &pt->tc));
pt 2499 tools/perf/util/intel-pt.c ret = intel_pt_sync_switch(pt, cpu, tid, sample->time);
pt 2503 tools/perf/util/intel-pt.c return machine__set_current_tid(pt->machine, cpu, pid, tid);
pt 2506 tools/perf/util/intel-pt.c static int intel_pt_process_itrace_start(struct intel_pt *pt,
pt 2510 tools/perf/util/intel-pt.c if (!pt->per_cpu_mmaps)
pt 2516 tools/perf/util/intel-pt.c perf_time_to_tsc(sample->time, &pt->tc));
pt 2518 tools/perf/util/intel-pt.c return machine__set_current_tid(pt->machine, sample->cpu,
pt 2528 tools/perf/util/intel-pt.c struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt,
pt 2542 tools/perf/util/intel-pt.c timestamp = perf_time_to_tsc(sample->time, &pt->tc);
pt 2546 tools/perf/util/intel-pt.c if (timestamp || pt->timeless_decoding) {
pt 2547 tools/perf/util/intel-pt.c err = intel_pt_update_queues(pt);
pt 2552 tools/perf/util/intel-pt.c if (pt->timeless_decoding) {
pt 2554 tools/perf/util/intel-pt.c err = intel_pt_process_timeless_queues(pt,
pt 2559 tools/perf/util/intel-pt.c err = intel_pt_process_queues(pt, timestamp);
pt 2566 tools/perf/util/intel-pt.c pt->synth_opts.errors) {
pt 2567 tools/perf/util/intel-pt.c err = intel_pt_lost(pt, sample);
pt 2572 tools/perf/util/intel-pt.c if (pt->switch_evsel && event->header.type == PERF_RECORD_SAMPLE)
pt 2573 tools/perf/util/intel-pt.c err = intel_pt_process_switch(pt, sample);
pt 2575 tools/perf/util/intel-pt.c err = intel_pt_process_itrace_start(pt, event, sample);
pt 2578 tools/perf/util/intel-pt.c err = intel_pt_context_switch(pt, event, sample);
pt 2589 tools/perf/util/intel-pt.c struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt,
pt 2599 tools/perf/util/intel-pt.c ret = intel_pt_update_queues(pt);
pt 2603 tools/perf/util/intel-pt.c if (pt->timeless_decoding)
pt 2604 tools/perf/util/intel-pt.c return intel_pt_process_timeless_queues(pt, -1,
pt 2607 tools/perf/util/intel-pt.c return intel_pt_process_queues(pt, MAX_TIMESTAMP);
pt 2612 tools/perf/util/intel-pt.c struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt,
pt 2614 tools/perf/util/intel-pt.c struct auxtrace_queues *queues = &pt->queues;
pt 2627 tools/perf/util/intel-pt.c struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt,
pt 2630 tools/perf/util/intel-pt.c auxtrace_heap__free(&pt->heap);
pt 2633 tools/perf/util/intel-pt.c thread__put(pt->unknown_thread);
pt 2634 tools/perf/util/intel-pt.c addr_filters__exit(&pt->filts);
pt 2635 tools/perf/util/intel-pt.c zfree(&pt->filter);
pt 2636 tools/perf/util/intel-pt.c zfree(&pt->time_ranges);
pt 2637 tools/perf/util/intel-pt.c free(pt);
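The intel_pt_process_event() and intel_pt_process_queues() entries above (pt 2248-2292, pt 2542-2559) outline the delivery model: each incoming perf event's time is converted to TSC, and all decode queues are advanced, smallest next-timestamp first, up to that point, so synthesized samples interleave in time order; "timeless" data appears to be drained whole at exit/flush instead. A toy sketch of the ordering loop, using a linear minimum scan where the kernel uses auxtrace_heap__pop()/auxtrace_heap__add() (all types and data below are invented):

    #include <stdint.h>
    #include <stdio.h>

    #define NR_QUEUES 3

    /* Each decode queue exposes the timestamp of its next pending sample;
     * 0 marks a drained queue. */
    struct queue { const char *name; uint64_t next_ts[4]; int idx; };

    static uint64_t queue_peek(const struct queue *q) { return q->next_ts[q->idx]; }

    /* Advance whichever queue has the smallest next timestamp, until all
     * pending samples before 'limit' have been emitted. */
    static void process_queues(struct queue *qs, uint64_t limit)
    {
        for (;;) {
            struct queue *best = NULL;
            int i;

            for (i = 0; i < NR_QUEUES; i++) {
                uint64_t ts = queue_peek(&qs[i]);

                if (ts && ts < limit && (!best || ts < queue_peek(best)))
                    best = &qs[i];
            }
            if (!best)
                return;
            printf("%s: sample at %llu\n", best->name,
                   (unsigned long long)queue_peek(best));
            best->idx++;
        }
    }

    int main(void)
    {
        struct queue qs[NR_QUEUES] = {
            { "cpu0", { 10, 40, 0 }, 0 },
            { "cpu1", { 20, 25, 0 }, 0 },
            { "cpu2", { 30, 0 }, 0 },
        };

        process_queues(qs, 100); /* emits 10, 20, 25, 30, 40 in order */
        return 0;
    }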
pt 2644 tools/perf/util/intel-pt.c struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt,
pt 2647 tools/perf/util/intel-pt.c if (!pt->data_queued) {
pt 2661 tools/perf/util/intel-pt.c err = auxtrace_queues__add_event(&pt->queues, session, event,
pt 2669 tools/perf/util/intel-pt.c intel_pt_dump_event(pt, buffer->data,
pt 2732 tools/perf/util/intel-pt.c static struct evsel *intel_pt_evsel(struct intel_pt *pt,
pt 2738 tools/perf/util/intel-pt.c if (evsel->core.attr.type == pt->pmu_type && evsel->core.ids)
pt 2745 tools/perf/util/intel-pt.c static int intel_pt_synth_events(struct intel_pt *pt,
pt 2749 tools/perf/util/intel-pt.c struct evsel *evsel = intel_pt_evsel(pt, evlist);
pt 2765 tools/perf/util/intel-pt.c if (pt->timeless_decoding)
pt 2769 tools/perf/util/intel-pt.c if (!pt->per_cpu_mmaps)
pt 2783 tools/perf/util/intel-pt.c if (pt->synth_opts.branches) {
pt 2790 tools/perf/util/intel-pt.c pt->sample_branches = true;
pt 2791 tools/perf/util/intel-pt.c pt->branches_sample_type = attr.sample_type;
pt 2792 tools/perf/util/intel-pt.c pt->branches_id = id;
pt 2797 tools/perf/util/intel-pt.c if (pt->synth_opts.callchain)
pt 2799 tools/perf/util/intel-pt.c if (pt->synth_opts.last_branch)
pt 2802 tools/perf/util/intel-pt.c if (pt->synth_opts.instructions) {
pt 2804 tools/perf/util/intel-pt.c if (pt->synth_opts.period_type == PERF_ITRACE_PERIOD_NANOSECS)
pt 2806 tools/perf/util/intel-pt.c intel_pt_ns_to_ticks(pt, pt->synth_opts.period);
pt 2808 tools/perf/util/intel-pt.c attr.sample_period = pt->synth_opts.period;
pt 2812 tools/perf/util/intel-pt.c pt->sample_instructions = true;
pt 2813 tools/perf/util/intel-pt.c pt->instructions_sample_type = attr.sample_type;
pt 2814 tools/perf/util/intel-pt.c pt->instructions_id = id;
pt 2821 tools/perf/util/intel-pt.c if (pt->synth_opts.transactions) {
pt 2826 tools/perf/util/intel-pt.c pt->sample_transactions = true;
pt 2827 tools/perf/util/intel-pt.c pt->transactions_sample_type = attr.sample_type;
pt 2828 tools/perf/util/intel-pt.c pt->transactions_id = id;
pt 2836 tools/perf/util/intel-pt.c if (pt->synth_opts.ptwrites) {
pt 2841 tools/perf/util/intel-pt.c pt->sample_ptwrites = true;
pt 2842 tools/perf/util/intel-pt.c pt->ptwrites_sample_type = attr.sample_type;
pt 2843 tools/perf/util/intel-pt.c pt->ptwrites_id = id;
pt 2848 tools/perf/util/intel-pt.c if (pt->synth_opts.pwr_events) {
pt 2849 tools/perf/util/intel-pt.c pt->sample_pwr_events = true;
pt 2850 tools/perf/util/intel-pt.c pt->pwr_events_sample_type = attr.sample_type;
pt 2856 tools/perf/util/intel-pt.c pt->cbr_id = id;
pt 2861 tools/perf/util/intel-pt.c if (pt->synth_opts.pwr_events && (evsel->core.attr.config & 0x10)) {
pt 2866 tools/perf/util/intel-pt.c pt->mwait_id = id;
pt 2874 tools/perf/util/intel-pt.c pt->pwre_id = id;
pt 2882 tools/perf/util/intel-pt.c pt->exstop_id = id;
pt 2890 tools/perf/util/intel-pt.c pt->pwrx_id = id;
pt 2898 tools/perf/util/intel-pt.c static void intel_pt_setup_pebs_events(struct intel_pt *pt)
pt 2902 tools/perf/util/intel-pt.c if (!pt->synth_opts.other_events)
pt 2905 tools/perf/util/intel-pt.c evlist__for_each_entry(pt->session->evlist, evsel) {
pt 2907 tools/perf/util/intel-pt.c pt->sample_pebs = true;
pt 2908 tools/perf/util/intel-pt.c pt->pebs_evsel = evsel;
pt 2942 tools/perf/util/intel-pt.c struct intel_pt *pt = data;
pt 2945 tools/perf/util/intel-pt.c pt->mispred_all = perf_config_bool(var, value);
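The intel_pt_perf_config() entries just above (pt 2942-2945) show the perfconfig hook: the callback receives each variable/value pair plus the intel_pt instance through its data pointer, and parses booleans with perf_config_bool(). A sketch of the shape of such a callback; the config key and the bool parser here are assumptions, only the data-pointer cast and the assignment are taken from the listing:

    #include <string.h>

    struct intel_pt_cfg { int mispred_all; };

    /* stand-in for perf_config_bool(); real parsing is more permissive */
    static int parse_bool(const char *value)
    {
        return value && (!strcmp(value, "true") || !strcmp(value, "1"));
    }

    static int intel_pt_config_cb(const char *var, const char *value, void *data)
    {
        struct intel_pt_cfg *cfg = data;

        if (!strcmp(var, "intel-pt.mispred-all")) /* key name assumed */
            cfg->mispred_all = parse_bool(value);

        return 0;
    }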
pt 2951 tools/perf/util/intel-pt.c static u64 intel_pt_tsc_start(u64 ns, struct intel_pt *pt)
pt 2955 tools/perf/util/intel-pt.c tsc = perf_time_to_tsc(ns, &pt->tc);
pt 2958 tools/perf/util/intel-pt.c tm = tsc_to_perf_time(tsc, &pt->tc);
pt 2965 tools/perf/util/intel-pt.c tm = tsc_to_perf_time(++tsc, &pt->tc);
pt 2971 tools/perf/util/intel-pt.c static u64 intel_pt_tsc_end(u64 ns, struct intel_pt *pt)
pt 2975 tools/perf/util/intel-pt.c tsc = perf_time_to_tsc(ns, &pt->tc);
pt 2978 tools/perf/util/intel-pt.c tm = tsc_to_perf_time(tsc, &pt->tc);
pt 2985 tools/perf/util/intel-pt.c tm = tsc_to_perf_time(--tsc, &pt->tc);
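intel_pt_tsc_start() and intel_pt_tsc_end() (pt 2951-2985) deal with the fact that perf-time-to-TSC and TSC-to-perf-time conversions do not round-trip exactly: the ++tsc/--tsc loops nudge the candidate tick until it is the first (for a range start) or last (for a range end) tick mapping into the requested time window. A self-contained sketch of the start-side fix-up with simplified conversions (constants invented; perf's real versions live in the tsc helpers seen above):

    #include <stdint.h>

    #define TIME_MULT 642
    #define TIME_SHIFT 10

    /* Deliberately lossy round-trip pair, like perf_time_to_tsc() and
     * tsc_to_perf_time(). */
    static uint64_t time_to_tsc(uint64_t ns)
    {
        return (ns / TIME_MULT << TIME_SHIFT) +
               (ns % TIME_MULT << TIME_SHIFT) / TIME_MULT;
    }

    static uint64_t tsc_to_time(uint64_t tsc)
    {
        return (tsc >> TIME_SHIFT) * TIME_MULT +
               ((tsc & ((1 << TIME_SHIFT) - 1)) * TIME_MULT >> TIME_SHIFT);
    }

    /* Mirrors the intel_pt_tsc_start() loops: step down, then up, until
     * tsc is the first tick whose perf time is >= ns. intel_pt_tsc_end()
     * is the mirror image, stepping with --tsc. */
    static uint64_t tsc_start(uint64_t ns)
    {
        uint64_t tsc = time_to_tsc(ns);
        uint64_t tm = tsc_to_time(tsc);

        while (tm >= ns && tsc)
            tm = tsc_to_time(--tsc);
        while (tm < ns)
            tm = tsc_to_time(++tsc);
        return tsc;
    }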
pt 2990 tools/perf/util/intel-pt.c static int intel_pt_setup_time_ranges(struct intel_pt *pt,
pt 2997 tools/perf/util/intel-pt.c if (!n || !p || pt->timeless_decoding)
pt 3000 tools/perf/util/intel-pt.c pt->time_ranges = calloc(n, sizeof(struct range));
pt 3001 tools/perf/util/intel-pt.c if (!pt->time_ranges)
pt 3004 tools/perf/util/intel-pt.c pt->range_cnt = n;
pt 3009 tools/perf/util/intel-pt.c struct range *r = &pt->time_ranges[i];
pt 3017 tools/perf/util/intel-pt.c r->start = ts ? intel_pt_tsc_start(ts, pt) : 0;
pt 3018 tools/perf/util/intel-pt.c r->end = te ? intel_pt_tsc_end(te, pt) : 0;
pt 3078 tools/perf/util/intel-pt.c struct intel_pt *pt;
pt 3087 tools/perf/util/intel-pt.c pt = zalloc(sizeof(struct intel_pt));
pt 3088 tools/perf/util/intel-pt.c if (!pt)
pt 3091 tools/perf/util/intel-pt.c addr_filters__init(&pt->filts);
pt 3093 tools/perf/util/intel-pt.c err = perf_config(intel_pt_perf_config, pt);
pt 3097 tools/perf/util/intel-pt.c err = auxtrace_queues__init(&pt->queues);
pt 3103 tools/perf/util/intel-pt.c pt->session = session;
pt 3104 tools/perf/util/intel-pt.c pt->machine = &session->machines.host; /* No kvm support */
pt 3105 tools/perf/util/intel-pt.c pt->auxtrace_type = auxtrace_info->type;
pt 3106 tools/perf/util/intel-pt.c pt->pmu_type = auxtrace_info->priv[INTEL_PT_PMU_TYPE];
pt 3107 tools/perf/util/intel-pt.c pt->tc.time_shift = auxtrace_info->priv[INTEL_PT_TIME_SHIFT];
pt 3108 tools/perf/util/intel-pt.c pt->tc.time_mult = auxtrace_info->priv[INTEL_PT_TIME_MULT];
pt 3109 tools/perf/util/intel-pt.c pt->tc.time_zero = auxtrace_info->priv[INTEL_PT_TIME_ZERO];
pt 3110 tools/perf/util/intel-pt.c pt->cap_user_time_zero = auxtrace_info->priv[INTEL_PT_CAP_USER_TIME_ZERO];
pt 3111 tools/perf/util/intel-pt.c pt->tsc_bit = auxtrace_info->priv[INTEL_PT_TSC_BIT];
pt 3112 tools/perf/util/intel-pt.c pt->noretcomp_bit = auxtrace_info->priv[INTEL_PT_NORETCOMP_BIT];
pt 3113 tools/perf/util/intel-pt.c pt->have_sched_switch = auxtrace_info->priv[INTEL_PT_HAVE_SCHED_SWITCH];
pt 3114 tools/perf/util/intel-pt.c pt->snapshot_mode = auxtrace_info->priv[INTEL_PT_SNAPSHOT_MODE];
pt 3115 tools/perf/util/intel-pt.c pt->per_cpu_mmaps = auxtrace_info->priv[INTEL_PT_PER_CPU_MMAPS];
pt 3120 tools/perf/util/intel-pt.c pt->mtc_bit = auxtrace_info->priv[INTEL_PT_MTC_BIT];
pt 3121 tools/perf/util/intel-pt.c pt->mtc_freq_bits = auxtrace_info->priv[INTEL_PT_MTC_FREQ_BITS];
pt 3122 tools/perf/util/intel-pt.c pt->tsc_ctc_ratio_n = auxtrace_info->priv[INTEL_PT_TSC_CTC_N];
pt 3123 tools/perf/util/intel-pt.c pt->tsc_ctc_ratio_d = auxtrace_info->priv[INTEL_PT_TSC_CTC_D];
pt 3124 tools/perf/util/intel-pt.c pt->cyc_bit = auxtrace_info->priv[INTEL_PT_CYC_BIT];
pt 3130 tools/perf/util/intel-pt.c pt->max_non_turbo_ratio =
pt 3157 tools/perf/util/intel-pt.c pt->filter = memdup(filter, len);
pt 3158 tools/perf/util/intel-pt.c if (!pt->filter) {
pt 3163 tools/perf/util/intel-pt.c mem_bswap_64(pt->filter, len);
pt 3164 tools/perf/util/intel-pt.c if (pt->filter[len - 1]) {
pt 3169 tools/perf/util/intel-pt.c err = addr_filters__parse_bare_filter(&pt->filts,
pt 3174 tools/perf/util/intel-pt.c intel_pt_print_info_str("Filter string", pt->filter);
pt 3177 tools/perf/util/intel-pt.c pt->timeless_decoding = intel_pt_timeless_decoding(pt);
pt 3178 tools/perf/util/intel-pt.c if (pt->timeless_decoding && !pt->tc.time_mult)
pt 3179 tools/perf/util/intel-pt.c pt->tc.time_mult = 1;
pt 3180 tools/perf/util/intel-pt.c pt->have_tsc = intel_pt_have_tsc(pt);
pt 3181 tools/perf/util/intel-pt.c pt->sampling_mode = false;
pt 3182 tools/perf/util/intel-pt.c pt->est_tsc = !pt->timeless_decoding;
pt 3184 tools/perf/util/intel-pt.c pt->unknown_thread = thread__new(999999999, 999999999);
pt 3185 tools/perf/util/intel-pt.c if (!pt->unknown_thread) {
pt 3196 tools/perf/util/intel-pt.c INIT_LIST_HEAD(&pt->unknown_thread->node);
pt 3198 tools/perf/util/intel-pt.c err = thread__set_comm(pt->unknown_thread, "unknown", 0);
pt 3201 tools/perf/util/intel-pt.c if (thread__init_map_groups(pt->unknown_thread, pt->machine)) {
pt 3206 tools/perf/util/intel-pt.c pt->auxtrace.process_event = intel_pt_process_event;
pt 3207 tools/perf/util/intel-pt.c pt->auxtrace.process_auxtrace_event = intel_pt_process_auxtrace_event;
pt 3208 tools/perf/util/intel-pt.c pt->auxtrace.flush_events = intel_pt_flush;
pt 3209 tools/perf/util/intel-pt.c pt->auxtrace.free_events = intel_pt_free_events;
pt 3210 tools/perf/util/intel-pt.c pt->auxtrace.free = intel_pt_free;
pt 3211 tools/perf/util/intel-pt.c session->auxtrace = &pt->auxtrace;
pt 3216 tools/perf/util/intel-pt.c if (pt->have_sched_switch == 1) {
pt 3217 tools/perf/util/intel-pt.c pt->switch_evsel = intel_pt_find_sched_switch(session->evlist);
pt 3218 tools/perf/util/intel-pt.c if (!pt->switch_evsel) {
pt 3223 tools/perf/util/intel-pt.c } else if (pt->have_sched_switch == 2 &&
pt 3231 tools/perf/util/intel-pt.c pt->synth_opts = *session->itrace_synth_opts;
pt 3233 tools/perf/util/intel-pt.c itrace_synth_opts__set_default(&pt->synth_opts,
pt 3237 tools/perf/util/intel-pt.c pt->synth_opts.branches = false;
pt 3238 tools/perf/util/intel-pt.c pt->synth_opts.callchain = true;
pt 3240 tools/perf/util/intel-pt.c pt->synth_opts.thread_stack =
pt 3244 tools/perf/util/intel-pt.c if (pt->synth_opts.log)
pt 3248 tools/perf/util/intel-pt.c if (pt->tc.time_mult) {
pt 3249 tools/perf/util/intel-pt.c u64 tsc_freq = intel_pt_ns_to_ticks(pt, 1000000000);
pt 3251 tools/perf/util/intel-pt.c if (!pt->max_non_turbo_ratio)
pt 3252 tools/perf/util/intel-pt.c pt->max_non_turbo_ratio =
pt 3256 tools/perf/util/intel-pt.c pt->max_non_turbo_ratio);
pt 3257 tools/perf/util/intel-pt.c pt->cbr2khz = tsc_freq / pt->max_non_turbo_ratio / 1000;
pt 3260 tools/perf/util/intel-pt.c err = intel_pt_setup_time_ranges(pt, session->itrace_synth_opts);
pt 3264 tools/perf/util/intel-pt.c if (pt->synth_opts.calls)
pt 3265 tools/perf/util/intel-pt.c pt->branches_filter |= PERF_IP_FLAG_CALL | PERF_IP_FLAG_ASYNC |
pt 3267 tools/perf/util/intel-pt.c if (pt->synth_opts.returns)
pt 3268 tools/perf/util/intel-pt.c pt->branches_filter |= PERF_IP_FLAG_RETURN |
pt 3271 tools/perf/util/intel-pt.c if (pt->synth_opts.callchain && !symbol_conf.use_callchain) {
pt 3275 tools/perf/util/intel-pt.c pt->synth_opts.callchain = false;
pt 3279 tools/perf/util/intel-pt.c err = intel_pt_synth_events(pt, session);
pt 3283 tools/perf/util/intel-pt.c intel_pt_setup_pebs_events(pt);
pt 3285 tools/perf/util/intel-pt.c err = auxtrace_queues__process_index(&pt->queues, session);
pt 3289 tools/perf/util/intel-pt.c if (pt->queues.populated)
pt 3290 tools/perf/util/intel-pt.c pt->data_queued = true;
pt 3292 tools/perf/util/intel-pt.c if (pt->timeless_decoding)
pt 3298 tools/perf/util/intel-pt.c thread__zput(pt->unknown_thread);
pt 3301 tools/perf/util/intel-pt.c auxtrace_queues__free(&pt->queues);
pt 3304 tools/perf/util/intel-pt.c addr_filters__exit(&pt->filts);
pt 3305 tools/perf/util/intel-pt.c zfree(&pt->filter);
pt 3306 tools/perf/util/intel-pt.c zfree(&pt->time_ranges);
pt 3307 tools/perf/util/intel-pt.c free(pt);
pt 252 tools/perf/util/strfilter.c int pt = node->r ? 2 : 0; /* don't need to check node->l */
pt 254 tools/perf/util/strfilter.c if (buf && pt)
pt 259 tools/perf/util/strfilter.c if (buf && pt)
pt 261 tools/perf/util/strfilter.c return len + pt;
pt 109 tools/testing/selftests/vDSO/parse_vdso.c ELF(Phdr) *pt = (ELF(Phdr)*)(vdso_info.load_addr + hdr->e_phoff);
pt 118 tools/testing/selftests/vDSO/parse_vdso.c if (pt[i].p_type == PT_LOAD && !found_vaddr) {
pt 121 tools/testing/selftests/vDSO/parse_vdso.c + (uintptr_t)pt[i].p_offset
pt 122 tools/testing/selftests/vDSO/parse_vdso.c - (uintptr_t)pt[i].p_vaddr;
pt 123 tools/testing/selftests/vDSO/parse_vdso.c } else if (pt[i].p_type == PT_DYNAMIC) {
pt 124 tools/testing/selftests/vDSO/parse_vdso.c dyn = (ELF(Dyn)*)(base + pt[i].p_offset);
pt 235 virt/kvm/eventfd.c poll_table *pt)
pt 238 virt/kvm/eventfd.c container_of(pt, struct kvm_kernel_irqfd, pt);
pt 376 virt/kvm/eventfd.c init_poll_funcptr(&irqfd->pt, irqfd_ptable_queue_proc);
pt 401 virt/kvm/eventfd.c events = vfs_poll(f.file, &irqfd->pt);
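The parse_vdso.c entries above (pt 109-124) compute the vDSO load bias from the first PT_LOAD program header (mapped address + p_offset - p_vaddr) and then locate PT_DYNAMIC relative to it. A user-space sketch of the same walk; in a real program load_addr would come from getauxval(AT_SYSINFO_EHDR), and, like the selftest, this assumes PT_DYNAMIC appears after the first PT_LOAD:

    #include <link.h>    /* ElfW(), elf.h types, PT_LOAD, PT_DYNAMIC */
    #include <stddef.h>
    #include <stdint.h>

    static ElfW(Dyn) *find_dynamic(uintptr_t load_addr, uintptr_t *base_out)
    {
        ElfW(Ehdr) *hdr = (ElfW(Ehdr) *)load_addr;
        ElfW(Phdr) *pt = (ElfW(Phdr) *)(load_addr + hdr->e_phoff);
        ElfW(Dyn) *dyn = NULL;
        uintptr_t base = 0;
        int found_vaddr = 0;
        size_t i;

        for (i = 0; i < hdr->e_phnum; i++) {
            if (pt[i].p_type == PT_LOAD && !found_vaddr) {
                found_vaddr = 1;
                /* first PT_LOAD fixes the bias between link-time
                 * addresses and where the object is actually mapped */
                base = load_addr + (uintptr_t)pt[i].p_offset
                                 - (uintptr_t)pt[i].p_vaddr;
            } else if (pt[i].p_type == PT_DYNAMIC) {
                dyn = (ElfW(Dyn) *)(base + pt[i].p_offset);
            }
        }
        *base_out = base;
        return dyn;
    }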