dp 211 arch/arc/include/asm/arcregs.h unsigned int pad2:15, dp:1, pad1:7, sp:1, ver:8; dp 213 arch/arc/include/asm/arcregs.h unsigned int ver:8, sp:1, pad1:7, dp:1, pad2:15; dp 46 arch/arc/include/asm/processor.h struct eznps_dp dp; dp 196 arch/arc/kernel/setup.c struct bcr_fp_arcompact sp, dp; dp 200 arch/arc/kernel/setup.c READ_BCR(ARC_REG_DPFP_BCR, dp); dp 202 arch/arc/kernel/setup.c cpu->extn.fpu_dp = dp.ver ? 1 : 0; dp 217 arch/arc/kernel/setup.c cpu->extn.fpu_dp = spdp.dp ? 1 : 0; dp 12 arch/arc/plat-eznps/ctop.c struct eznps_dp *prev_task_dp = &prev->thread.dp; dp 13 arch/arc/plat-eznps/ctop.c struct eznps_dp *next_task_dp = &next->thread.dp; dp 160 arch/c6x/include/uapi/asm/ptrace.h REG_PAIR(sp, dp); dp 81 arch/c6x/kernel/asm-offsets.c OFFSET(REGS_DP, pt_regs, dp); dp 136 arch/c6x/kernel/process.c unsigned long dp; dp 138 arch/c6x/kernel/process.c asm volatile ("mv .S2 b14,%0\n" : "=b"(dp)); dp 140 arch/c6x/kernel/process.c thread_saved_dp(p) = dp; dp 142 arch/c6x/kernel/process.c childregs->dp = dp; dp 48 arch/c6x/kernel/traps.c pr_err("A14: %08lx B14: %08lx\n", regs->a14, regs->dp); dp 202 arch/ia64/include/asm/io.h unsigned char *dp = dst; dp 205 arch/ia64/include/asm/io.h *dp++ = inb(port); dp 211 arch/ia64/include/asm/io.h unsigned short *dp = dst; dp 214 arch/ia64/include/asm/io.h put_unaligned(inw(port), dp++); dp 220 arch/ia64/include/asm/io.h unsigned int *dp = dst; dp 223 arch/ia64/include/asm/io.h put_unaligned(inl(port), dp++); dp 510 arch/ia64/include/asm/pal.h dp : 1, /* Data poisoned on MBE */ dp 584 arch/ia64/include/asm/pal.h dp : 1, /* Data poisoned on MBE */ dp 1537 arch/ia64/kernel/unwind.c u8 *dp, *desc_end; dp 1601 arch/ia64/kernel/unwind.c dp = (u8 *) (table->segment_base + e->info_offset + 8); dp 1602 arch/ia64/kernel/unwind.c desc_end = dp + 8*UNW_LENGTH(hdr); dp 1604 arch/ia64/kernel/unwind.c while (!sr.done && dp < desc_end) dp 1605 arch/ia64/kernel/unwind.c dp = unw_decode(dp, sr.in_body, &sr); dp 86 arch/ia64/kernel/unwind_decoder.c unw_decode_x1 (unsigned char *dp, unsigned char code, void *arg) dp 91 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; dp 92 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 93 arch/ia64/kernel/unwind_decoder.c off = unw_decode_uleb128 (&dp); dp 99 arch/ia64/kernel/unwind_decoder.c return dp; dp 103 arch/ia64/kernel/unwind_decoder.c unw_decode_x2 (unsigned char *dp, unsigned char code, void *arg) dp 108 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; byte2 = *dp++; dp 109 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 117 arch/ia64/kernel/unwind_decoder.c return dp; dp 121 arch/ia64/kernel/unwind_decoder.c unw_decode_x3 (unsigned char *dp, unsigned char code, void *arg) dp 126 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; byte2 = *dp++; dp 127 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 128 arch/ia64/kernel/unwind_decoder.c off = unw_decode_uleb128 (&dp); dp 137 arch/ia64/kernel/unwind_decoder.c return dp; dp 141 arch/ia64/kernel/unwind_decoder.c unw_decode_x4 (unsigned char *dp, unsigned char code, void *arg) dp 146 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; byte2 = *dp++; byte3 = *dp++; dp 147 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 158 arch/ia64/kernel/unwind_decoder.c return dp; dp 162 arch/ia64/kernel/unwind_decoder.c unw_decode_r1 (unsigned char *dp, unsigned char code, void *arg) dp 169 arch/ia64/kernel/unwind_decoder.c return dp; dp 173 arch/ia64/kernel/unwind_decoder.c unw_decode_r2 (unsigned char *dp, unsigned char 
code, void *arg) dp 178 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; dp 182 arch/ia64/kernel/unwind_decoder.c rlen = unw_decode_uleb128 (&dp); dp 184 arch/ia64/kernel/unwind_decoder.c return dp; dp 188 arch/ia64/kernel/unwind_decoder.c unw_decode_r3 (unsigned char *dp, unsigned char code, void *arg) dp 192 arch/ia64/kernel/unwind_decoder.c rlen = unw_decode_uleb128 (&dp); dp 194 arch/ia64/kernel/unwind_decoder.c return dp; dp 198 arch/ia64/kernel/unwind_decoder.c unw_decode_p1 (unsigned char *dp, unsigned char code, void *arg) dp 203 arch/ia64/kernel/unwind_decoder.c return dp; dp 207 arch/ia64/kernel/unwind_decoder.c unw_decode_p2_p5 (unsigned char *dp, unsigned char code, void *arg) dp 211 arch/ia64/kernel/unwind_decoder.c unsigned char byte1 = *dp++; dp 218 arch/ia64/kernel/unwind_decoder.c unsigned char byte1 = *dp++, r, dst; dp 240 arch/ia64/kernel/unwind_decoder.c UNW_DEC_SPILL_MASK(P4, dp, arg); dp 245 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; byte2 = *dp++; byte3 = *dp++; dp 252 arch/ia64/kernel/unwind_decoder.c return dp; dp 256 arch/ia64/kernel/unwind_decoder.c unw_decode_p6 (unsigned char *dp, unsigned char code, void *arg) dp 265 arch/ia64/kernel/unwind_decoder.c return dp; dp 269 arch/ia64/kernel/unwind_decoder.c unw_decode_p7_p10 (unsigned char *dp, unsigned char code, void *arg) dp 277 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 281 arch/ia64/kernel/unwind_decoder.c size = unw_decode_uleb128 (&dp); dp 309 arch/ia64/kernel/unwind_decoder.c r = *dp++; dp 310 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 338 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; byte2 = *dp++; dp 343 arch/ia64/kernel/unwind_decoder.c byte1 = *dp++; byte2 = *dp++; dp 348 arch/ia64/kernel/unwind_decoder.c return unw_decode_x1 (dp, code, arg); dp 351 arch/ia64/kernel/unwind_decoder.c return unw_decode_x2 (dp, code, arg); dp 354 arch/ia64/kernel/unwind_decoder.c return unw_decode_x3 (dp, code, arg); dp 357 arch/ia64/kernel/unwind_decoder.c return unw_decode_x4 (dp, code, arg); dp 364 arch/ia64/kernel/unwind_decoder.c return dp; dp 368 arch/ia64/kernel/unwind_decoder.c unw_decode_b1 (unsigned char *dp, unsigned char code, void *arg) dp 376 arch/ia64/kernel/unwind_decoder.c return dp; dp 380 arch/ia64/kernel/unwind_decoder.c unw_decode_b2 (unsigned char *dp, unsigned char code, void *arg) dp 384 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 386 arch/ia64/kernel/unwind_decoder.c return dp; dp 390 arch/ia64/kernel/unwind_decoder.c unw_decode_b3_x4 (unsigned char *dp, unsigned char code, void *arg) dp 396 arch/ia64/kernel/unwind_decoder.c t = unw_decode_uleb128 (&dp); dp 397 arch/ia64/kernel/unwind_decoder.c ecount = unw_decode_uleb128 (&dp); dp 402 arch/ia64/kernel/unwind_decoder.c label = unw_decode_uleb128 (&dp); dp 411 arch/ia64/kernel/unwind_decoder.c case 1: return unw_decode_x1 (dp, code, arg); dp 412 arch/ia64/kernel/unwind_decoder.c case 2: return unw_decode_x2 (dp, code, arg); dp 413 arch/ia64/kernel/unwind_decoder.c case 3: return unw_decode_x3 (dp, code, arg); dp 414 arch/ia64/kernel/unwind_decoder.c case 4: return unw_decode_x4 (dp, code, arg); dp 417 arch/ia64/kernel/unwind_decoder.c return dp; dp 451 arch/ia64/kernel/unwind_decoder.c unw_decode (unsigned char *dp, int inside_body, void *arg) dp 456 arch/ia64/kernel/unwind_decoder.c code = *dp++; dp 458 arch/ia64/kernel/unwind_decoder.c dp = (*decoder) (dp, code, arg); dp 459 arch/ia64/kernel/unwind_decoder.c return dp; dp 34 arch/m68k/mm/memory.c #define PD_MARKBITS(dp) 
(*(unsigned char *)&PD_PAGE(dp)->index) dp 40 arch/m68k/mm/memory.c ptable_desc *dp; dp 44 arch/m68k/mm/memory.c dp = PD_PTABLE(page); dp 45 arch/m68k/mm/memory.c if (!(PD_MARKBITS(dp) & mask)) { dp 46 arch/m68k/mm/memory.c PD_MARKBITS(dp) = 0xff; dp 47 arch/m68k/mm/memory.c list_add(dp, &ptable_list); dp 50 arch/m68k/mm/memory.c PD_MARKBITS(dp) &= ~mask; dp 51 arch/m68k/mm/memory.c pr_debug("init_pointer_table: %lx, %x\n", ptable, PD_MARKBITS(dp)); dp 54 arch/m68k/mm/memory.c __ClearPageReserved(PD_PAGE(dp)); dp 55 arch/m68k/mm/memory.c init_page_count(PD_PAGE(dp)); dp 62 arch/m68k/mm/memory.c ptable_desc *dp = ptable_list.next; dp 63 arch/m68k/mm/memory.c unsigned char mask = PD_MARKBITS (dp); dp 85 arch/m68k/mm/memory.c list_add_tail(new, dp); dp 92 arch/m68k/mm/memory.c PD_MARKBITS(dp) = mask & ~tmp; dp 93 arch/m68k/mm/memory.c if (!PD_MARKBITS(dp)) { dp 95 arch/m68k/mm/memory.c list_move_tail(dp, &ptable_list); dp 97 arch/m68k/mm/memory.c return (pmd_t *) (page_address(PD_PAGE(dp)) + off); dp 102 arch/m68k/mm/memory.c ptable_desc *dp; dp 106 arch/m68k/mm/memory.c dp = PD_PTABLE(page); dp 107 arch/m68k/mm/memory.c if (PD_MARKBITS (dp) & mask) dp 110 arch/m68k/mm/memory.c PD_MARKBITS (dp) |= mask; dp 112 arch/m68k/mm/memory.c if (PD_MARKBITS(dp) == 0xff) { dp 114 arch/m68k/mm/memory.c list_del(dp); dp 118 arch/m68k/mm/memory.c } else if (ptable_list.next != dp) { dp 123 arch/m68k/mm/memory.c list_move(dp, &ptable_list); dp 212 arch/mips/alchemy/common/dbdma.c void *au1xxx_ddma_get_nextptr_virt(au1x_ddma_desc_t *dp) dp 214 arch/mips/alchemy/common/dbdma.c return phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 398 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 436 arch/mips/alchemy/common/dbdma.c dp = (au1x_ddma_desc_t *)desc_base; dp 439 arch/mips/alchemy/common/dbdma.c ctp->chan_desc_base = dp; dp 564 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 = cmd0; dp 565 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd1 = cmd1; dp 566 arch/mips/alchemy/common/dbdma.c dp->dscr_source0 = src0; dp 567 arch/mips/alchemy/common/dbdma.c dp->dscr_source1 = src1; dp 568 arch/mips/alchemy/common/dbdma.c dp->dscr_dest0 = dest0; dp 569 arch/mips/alchemy/common/dbdma.c dp->dscr_dest1 = dest1; dp 570 arch/mips/alchemy/common/dbdma.c dp->dscr_stat = 0; dp 571 arch/mips/alchemy/common/dbdma.c dp->sw_context = 0; dp 572 arch/mips/alchemy/common/dbdma.c dp->sw_status = 0; dp 573 arch/mips/alchemy/common/dbdma.c dp->dscr_nxtptr = DSCR_NXTPTR(virt_to_phys(dp + 1)); dp 574 arch/mips/alchemy/common/dbdma.c dp++; dp 578 arch/mips/alchemy/common/dbdma.c dp--; dp 579 arch/mips/alchemy/common/dbdma.c dp->dscr_nxtptr = DSCR_NXTPTR(virt_to_phys(ctp->chan_desc_base)); dp 594 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 607 arch/mips/alchemy/common/dbdma.c dp = ctp->put_ptr; dp 613 arch/mips/alchemy/common/dbdma.c if (dp->dscr_cmd0 & DSCR_CMD0_V) dp 617 arch/mips/alchemy/common/dbdma.c dp->dscr_source0 = buf & ~0UL; dp 618 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd1 = nbytes; dp 621 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 |= DSCR_CMD0_IE; dp 623 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 &= ~DSCR_CMD0_IE; dp 633 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 |= DSCR_CMD0_V; /* Let it rip */ dp 635 arch/mips/alchemy/common/dbdma.c dma_cache_wback_inv((unsigned long)dp, sizeof(*dp)); dp 639 arch/mips/alchemy/common/dbdma.c ctp->put_ptr = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 653 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 664 arch/mips/alchemy/common/dbdma.c dp 
= ctp->put_ptr; dp 669 arch/mips/alchemy/common/dbdma.c if (dp->dscr_cmd0 & DSCR_CMD0_V) dp 676 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 |= DSCR_CMD0_IE; dp 678 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 &= ~DSCR_CMD0_IE; dp 680 arch/mips/alchemy/common/dbdma.c dp->dscr_dest0 = buf & ~0UL; dp 681 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd1 = nbytes; dp 684 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0, dp->dscr_cmd1, dp->dscr_source0, dp 685 arch/mips/alchemy/common/dbdma.c dp->dscr_source1, dp->dscr_dest0, dp->dscr_dest1); dp 695 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 |= DSCR_CMD0_V; /* Let it rip */ dp 697 arch/mips/alchemy/common/dbdma.c dma_cache_wback_inv((unsigned long)dp, sizeof(*dp)); dp 701 arch/mips/alchemy/common/dbdma.c ctp->put_ptr = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 717 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 731 arch/mips/alchemy/common/dbdma.c dp = ctp->get_ptr; dp 737 arch/mips/alchemy/common/dbdma.c if (dp->dscr_cmd0 & DSCR_CMD0_V) dp 741 arch/mips/alchemy/common/dbdma.c *buf = (void *)(phys_to_virt(dp->dscr_dest0)); dp 742 arch/mips/alchemy/common/dbdma.c *nbytes = dp->dscr_cmd1; dp 743 arch/mips/alchemy/common/dbdma.c rv = dp->dscr_stat; dp 746 arch/mips/alchemy/common/dbdma.c ctp->get_ptr = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 801 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 809 arch/mips/alchemy/common/dbdma.c dp = ctp->chan_desc_base; dp 812 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 &= ~DSCR_CMD0_V; dp 818 arch/mips/alchemy/common/dbdma.c dp->sw_status = 0; dp 819 arch/mips/alchemy/common/dbdma.c dp = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 820 arch/mips/alchemy/common/dbdma.c } while (dp != ctp->chan_desc_base); dp 867 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 876 arch/mips/alchemy/common/dbdma.c dp = ctp->cur_ptr; dp 885 arch/mips/alchemy/common/dbdma.c ctp->cur_ptr = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 892 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 917 arch/mips/alchemy/common/dbdma.c dp = ctp->chan_desc_base; dp 921 arch/mips/alchemy/common/dbdma.c i++, (u32)dp, dp->dscr_cmd0, dp->dscr_cmd1); dp 923 arch/mips/alchemy/common/dbdma.c dp->dscr_source0, dp->dscr_source1, dp 924 arch/mips/alchemy/common/dbdma.c dp->dscr_dest0, dp->dscr_dest1); dp 926 arch/mips/alchemy/common/dbdma.c dp->dscr_stat, dp->dscr_nxtptr); dp 927 arch/mips/alchemy/common/dbdma.c dp = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 928 arch/mips/alchemy/common/dbdma.c } while (dp != ctp->chan_desc_base); dp 937 arch/mips/alchemy/common/dbdma.c au1x_ddma_desc_t *dp; dp 951 arch/mips/alchemy/common/dbdma.c dp = ctp->put_ptr; dp 957 arch/mips/alchemy/common/dbdma.c if (dp->dscr_cmd0 & DSCR_CMD0_V) dp 961 arch/mips/alchemy/common/dbdma.c dp->dscr_dest0 = dscr->dscr_dest0; dp 962 arch/mips/alchemy/common/dbdma.c dp->dscr_source0 = dscr->dscr_source0; dp 963 arch/mips/alchemy/common/dbdma.c dp->dscr_dest1 = dscr->dscr_dest1; dp 964 arch/mips/alchemy/common/dbdma.c dp->dscr_source1 = dscr->dscr_source1; dp 965 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd1 = dscr->dscr_cmd1; dp 968 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 &= ~DSCR_CMD0_IE; dp 969 arch/mips/alchemy/common/dbdma.c dp->dscr_cmd0 |= dscr->dscr_cmd0 | DSCR_CMD0_V; dp 973 arch/mips/alchemy/common/dbdma.c ctp->put_ptr = phys_to_virt(DSCR_GET_NXTPTR(dp->dscr_nxtptr)); dp 379 arch/mips/include/asm/mach-au1x00/au1xxx_dbdma.h void *au1xxx_ddma_get_nextptr_virt(au1x_ddma_desc_t *dp); dp 842 
arch/mips/math-emu/cp1emu.c #define DPFROMREG(dp, x) DIFROMREG((dp).bits, x) dp 843 arch/mips/math-emu/cp1emu.c #define DPTOREG(dp, x) DITOREG((dp).bits, x) dp 1458 arch/mips/math-emu/cp1emu.c DEF3OP(madd, dp, ieee754dp_mul, ieee754dp_add, ); dp 1459 arch/mips/math-emu/cp1emu.c DEF3OP(msub, dp, ieee754dp_mul, ieee754dp_sub, ); dp 1460 arch/mips/math-emu/cp1emu.c DEF3OP(nmadd, dp, ieee754dp_mul, ieee754dp_add, ieee754dp_neg); dp 1461 arch/mips/math-emu/cp1emu.c DEF3OP(nmsub, dp, ieee754dp_mul, ieee754dp_sub, ieee754dp_neg); dp 27 arch/mips/math-emu/ieee754dp.h #define DPSIGN(dp) (dp.sign) dp 28 arch/mips/math-emu/ieee754dp.h #define DPBEXP(dp) (dp.bexp) dp 29 arch/mips/math-emu/ieee754dp.h #define DPMANT(dp) (dp.mant) dp 10 arch/nds32/math-emu/fpuemu.c #define DPFROMREG(dp, x) (dp = (void *)((unsigned long *)fpu_reg + 2*x)) dp 16 arch/parisc/include/asm/asmregs.h dp: .reg %r27 dp 379 arch/parisc/include/uapi/asm/pdc.h struct device_path dp; /* see above */ dp 1253 arch/parisc/kernel/firmware.c PAGE0->mem_cons.spa, __pa(PAGE0->mem_cons.dp.layers), dp 1281 arch/parisc/kernel/firmware.c PAGE0->mem_kbd.spa, __pa(PAGE0->mem_kbd.dp.layers), dp 534 arch/parisc/kernel/module.c register unsigned long dp asm ("r27"); dp 609 arch/parisc/kernel/module.c val = lrsel(val - dp, addend); dp 614 arch/parisc/kernel/module.c val = rrsel(val - dp, addend); dp 121 arch/powerpc/include/asm/sstep.h double dp[2]; dp 43 arch/powerpc/lib/sstep.c extern void conv_sp_to_dp(const float *sp, double *dp); dp 44 arch/powerpc/lib/sstep.c extern void conv_dp_to_sp(const double *dp, float *sp); dp 693 arch/powerpc/lib/sstep.c &reg->dp[IS_LE]); dp 774 arch/powerpc/lib/sstep.c conv_dp_to_sp(&reg->dp[IS_LE], &buf.fp[1 + IS_LE]); dp 107 arch/powerpc/math-emu/math_efp.c u64 dp[1]; dp 288 arch/powerpc/math-emu/math_efp.c FP_UNPACK_DP(DB, vb.dp); dp 348 arch/powerpc/math-emu/math_efp.c FP_UNPACK_DP(DA, va.dp); dp 350 arch/powerpc/math-emu/math_efp.c FP_UNPACK_DP(DB, vb.dp); dp 353 arch/powerpc/math-emu/math_efp.c FP_UNPACK_DP(DA, va.dp); dp 364 arch/powerpc/math-emu/math_efp.c vc.dp[0] = va.dp[0] & ~SIGN_BIT_D; dp 368 arch/powerpc/math-emu/math_efp.c vc.dp[0] = va.dp[0] | SIGN_BIT_D; dp 372 arch/powerpc/math-emu/math_efp.c vc.dp[0] = va.dp[0] ^ SIGN_BIT_D; dp 430 arch/powerpc/math-emu/math_efp.c vc.dp[0] = 0; dp 433 arch/powerpc/math-emu/math_efp.c FP_TO_INT_D(vc.dp[0], DB, 64, dp 469 arch/powerpc/math-emu/math_efp.c FP_PACK_DP(vc.dp, DR); dp 839 arch/powerpc/math-emu/math_efp.c fgpr.dp[0]++; /* Z > 0, choose Z1 */ dp 846 arch/powerpc/math-emu/math_efp.c fgpr.dp[0]++; /* Z < 0, choose Z2 */ dp 508 arch/powerpc/platforms/powermac/nvram.c static int __init core99_nvram_setup(struct device_node *dp, unsigned long addr) dp 550 arch/powerpc/platforms/powermac/nvram.c if (of_device_is_compatible(dp, "amd-0137")) { dp 562 arch/powerpc/platforms/powermac/nvram.c struct device_node *dp; dp 569 arch/powerpc/platforms/powermac/nvram.c dp = of_find_node_by_name(NULL, "nvram"); dp 570 arch/powerpc/platforms/powermac/nvram.c if (dp == NULL) { dp 576 arch/powerpc/platforms/powermac/nvram.c if (of_address_to_resource(dp, 0, &r1) == 0) { dp 579 arch/powerpc/platforms/powermac/nvram.c if (of_address_to_resource(dp, 1, &r2) == 0) { dp 585 arch/powerpc/platforms/powermac/nvram.c is_core_99 = of_device_is_compatible(dp, "nvram,flash"); dp 587 arch/powerpc/platforms/powermac/nvram.c err = core99_nvram_setup(dp, r1.start); dp 623 arch/powerpc/platforms/powermac/nvram.c of_node_put(dp); dp 177 arch/powerpc/sysdev/dart_iommu.c unsigned int *dp, *orig_dp; dp 183
arch/powerpc/sysdev/dart_iommu.c orig_dp = dp = ((unsigned int*)tbl->it_base) + index; dp 192 arch/powerpc/sysdev/dart_iommu.c *(dp++) = DARTMAP_VALID | (rpn & DARTMAP_RPNMASK); dp 211 arch/powerpc/sysdev/dart_iommu.c unsigned int *dp, *orig_dp; dp 221 arch/powerpc/sysdev/dart_iommu.c orig_dp = dp = ((unsigned int *)tbl->it_base) + index; dp 224 arch/powerpc/sysdev/dart_iommu.c *(dp++) = dart_emptyval; dp 53 arch/powerpc/sysdev/dcr.c struct device_node *dp; dp 58 arch/powerpc/sysdev/dcr.c dp = find_dcr_parent(dev); dp 59 arch/powerpc/sysdev/dcr.c if (dp == NULL) dp 62 arch/powerpc/sysdev/dcr.c prop = of_get_property(dp, "dcr-access-method", NULL); dp 74 arch/powerpc/sysdev/dcr.c of_node_put(dp); dp 146 arch/powerpc/sysdev/dcr.c struct device_node *dp; dp 151 arch/powerpc/sysdev/dcr.c dp = find_dcr_parent(dev); dp 152 arch/powerpc/sysdev/dcr.c if (dp == NULL) dp 156 arch/powerpc/sysdev/dcr.c p = of_get_property(dp, "dcr-mmio-stride", NULL); dp 160 arch/powerpc/sysdev/dcr.c p = of_get_property(dp, "dcr-mmio-range", NULL); dp 162 arch/powerpc/sysdev/dcr.c p = of_get_property(dp, "dcr-mmio-space", NULL); dp 167 arch/powerpc/sysdev/dcr.c ret = of_translate_address(dp, p); dp 174 arch/powerpc/sysdev/dcr.c of_node_put(dp); dp 278 arch/sparc/include/asm/floppy_32.h struct device_node *dp; dp 325 arch/sparc/include/asm/floppy_32.h for_each_node_by_name(dp, "SUNW,fdtwo") { dp 326 arch/sparc/include/asm/floppy_32.h op = of_find_device_by_node(dp); dp 331 arch/sparc/include/asm/floppy_32.h for_each_node_by_name(dp, "fd") { dp 332 arch/sparc/include/asm/floppy_32.h op = of_find_device_by_node(dp); dp 529 arch/sparc/include/asm/floppy_64.h static int __init ebus_fdthree_p(struct device_node *dp) dp 531 arch/sparc/include/asm/floppy_64.h if (of_node_name_eq(dp, "fdthree")) dp 533 arch/sparc/include/asm/floppy_64.h if (of_node_name_eq(dp, "floppy")) { dp 536 arch/sparc/include/asm/floppy_64.h compat = of_get_property(dp, "compatible", NULL); dp 546 arch/sparc/include/asm/floppy_64.h struct device_node *dp; dp 557 arch/sparc/include/asm/floppy_64.h for_each_node_by_name(dp, "SUNW,fdtwo") { dp 558 arch/sparc/include/asm/floppy_64.h if (!of_node_name_eq(dp->parent, "sbus")) dp 560 arch/sparc/include/asm/floppy_64.h op = of_find_device_by_node(dp); dp 573 arch/sparc/include/asm/floppy_64.h dp = NULL; dp 575 arch/sparc/include/asm/floppy_64.h for (dp = ebus_dp->child; dp; dp = dp->sibling) { dp 576 arch/sparc/include/asm/floppy_64.h if (ebus_fdthree_p(dp)) dp 581 arch/sparc/include/asm/floppy_64.h if (!dp) dp 584 arch/sparc/include/asm/floppy_64.h op = of_find_device_by_node(dp); dp 658 arch/sparc/include/asm/floppy_64.h for (dp = ebus_dp->child; dp; dp = dp->sibling) { dp 659 arch/sparc/include/asm/floppy_64.h if (of_node_name_eq(dp, "ecpp")) { dp 662 arch/sparc/include/asm/floppy_64.h ecpp_op = of_find_device_by_node(dp); dp 228 arch/sparc/include/asm/leon.h void leon_node_init(struct device_node *dp, struct device_node ***nextp); dp 181 arch/sparc/include/asm/leon_amba.h void _amba_init(struct device_node *dp, struct device_node ***nextp); dp 52 arch/sparc/include/asm/prom.h void irq_trans_init(struct device_node *dp); dp 53 arch/sparc/include/asm/prom.h char *build_path_component(struct device_node *dp); dp 334 arch/sparc/include/asm/vio.h struct device_node *dp; dp 108 arch/sparc/kernel/auxio_64.c struct device_node *dp = dev->dev.of_node; dp 111 arch/sparc/kernel/auxio_64.c if (of_node_name_eq(dp->parent, "ebus")) { dp 114 arch/sparc/kernel/auxio_64.c } else if (of_node_name_eq(dp->parent, "sbus")) { dp 
119 arch/sparc/kernel/auxio_64.c dp->parent); dp 126 arch/sparc/kernel/auxio_64.c printk(KERN_INFO "AUXIO: Found device at %pOF\n", dp); dp 281 arch/sparc/kernel/chmc.c struct jbusmc_dimm_group *dp = &p->dimm_groups[i]; dp 283 arch/sparc/kernel/chmc.c if (phys_addr < dp->base_addr || dp 284 arch/sparc/kernel/chmc.c (dp->base_addr + dp->size) <= phys_addr) dp 287 arch/sparc/kernel/chmc.c return dp; dp 298 arch/sparc/kernel/chmc.c struct jbusmc_dimm_group *dp; dp 302 arch/sparc/kernel/chmc.c dp = jbusmc_find_dimm_group(phys_addr); dp 303 arch/sparc/kernel/chmc.c if (dp == NULL || dp 312 arch/sparc/kernel/chmc.c p = dp->controller; dp 315 arch/sparc/kernel/chmc.c first_dimm = dp->index * JB_NUM_DIMMS_PER_GROUP; dp 372 arch/sparc/kernel/chmc.c struct jbusmc_dimm_group *dp = &p->dimm_groups[index]; dp 374 arch/sparc/kernel/chmc.c dp->controller = p; dp 375 arch/sparc/kernel/chmc.c dp->index = index; dp 377 arch/sparc/kernel/chmc.c dp->base_addr = (p->portid * (64UL * 1024 * 1024 * 1024)); dp 378 arch/sparc/kernel/chmc.c dp->base_addr += (index * (8UL * 1024 * 1024 * 1024)); dp 379 arch/sparc/kernel/chmc.c dp->size = jbusmc_dimm_group_size(dp->base_addr, mem_regs, num_mem_regs); dp 695 arch/sparc/kernel/chmc.c struct device_node *dp = op->dev.of_node; dp 708 arch/sparc/kernel/chmc.c portid = of_getintprop_default(dp, "portid", -1); dp 712 arch/sparc/kernel/chmc.c pval = of_get_property(dp, "memory-layout", &len); dp 752 arch/sparc/kernel/chmc.c dp, dp 57 arch/sparc/kernel/devices.c struct device_node *dp; dp 61 arch/sparc/kernel/devices.c for_each_node_by_type(dp, "cpu") { dp 62 arch/sparc/kernel/devices.c int err = check_cpu_node(dp->phandle, &cur_inst, dp 66 arch/sparc/kernel/devices.c of_node_put(dp); dp 135 arch/sparc/kernel/ds.c void (*data)(struct ds_info *dp, dp 147 arch/sparc/kernel/ds.c static void md_update_data(struct ds_info *dp, struct ds_cap_state *cp, dp 149 arch/sparc/kernel/ds.c static void domain_shutdown_data(struct ds_info *dp, dp 152 arch/sparc/kernel/ds.c static void domain_panic_data(struct ds_info *dp, dp 156 arch/sparc/kernel/ds.c static void dr_cpu_data(struct ds_info *dp, dp 160 arch/sparc/kernel/ds.c static void ds_pri_data(struct ds_info *dp, dp 163 arch/sparc/kernel/ds.c static void ds_var_data(struct ds_info *dp, dp 221 arch/sparc/kernel/ds.c static struct ds_cap_state *find_cap(struct ds_info *dp, u64 handle) dp 225 arch/sparc/kernel/ds.c if (index >= dp->num_ds_states) dp 227 arch/sparc/kernel/ds.c return &dp->ds_states[index]; dp 230 arch/sparc/kernel/ds.c static struct ds_cap_state *find_cap_by_string(struct ds_info *dp, dp 235 arch/sparc/kernel/ds.c for (i = 0; i < dp->num_ds_states; i++) { dp 236 arch/sparc/kernel/ds.c if (strcmp(dp->ds_states[i].service_id, name)) dp 239 arch/sparc/kernel/ds.c return &dp->ds_states[i]; dp 280 arch/sparc/kernel/ds.c static void md_update_data(struct ds_info *dp, dp 284 arch/sparc/kernel/ds.c struct ldc_channel *lp = dp->lp; dp 294 arch/sparc/kernel/ds.c printk(KERN_INFO "ds-%llu: Machine description update.\n", dp->id); dp 319 arch/sparc/kernel/ds.c static void domain_shutdown_data(struct ds_info *dp, dp 323 arch/sparc/kernel/ds.c struct ldc_channel *lp = dp->lp; dp 334 arch/sparc/kernel/ds.c "LDOM manager received.\n", dp->id); dp 359 arch/sparc/kernel/ds.c static void domain_panic_data(struct ds_info *dp, dp 363 arch/sparc/kernel/ds.c struct ldc_channel *lp = dp->lp; dp 374 arch/sparc/kernel/ds.c "LDOM manager received.\n", dp->id); dp 422 arch/sparc/kernel/ds.c static void __dr_cpu_send_error(struct ds_info *dp, dp 445 
arch/sparc/kernel/ds.c __ds_send(dp->lp, &pkt, msg_len); dp 448 arch/sparc/kernel/ds.c static void dr_cpu_send_error(struct ds_info *dp, dp 455 arch/sparc/kernel/ds.c __dr_cpu_send_error(dp, cp, data); dp 533 arch/sparc/kernel/ds.c static int dr_cpu_configure(struct ds_info *dp, struct ds_cap_state *cp, dp 557 arch/sparc/kernel/ds.c dp->id, cpu); dp 573 arch/sparc/kernel/ds.c dp->id, err); dp 579 arch/sparc/kernel/ds.c __ds_send(dp->lp, resp, resp_len); dp 590 arch/sparc/kernel/ds.c static int dr_cpu_unconfigure(struct ds_info *dp, dp 613 arch/sparc/kernel/ds.c dp->id, cpu); dp 622 arch/sparc/kernel/ds.c __ds_send(dp->lp, resp, resp_len); dp 630 arch/sparc/kernel/ds.c static void dr_cpu_data(struct ds_info *dp, struct ds_cap_state *cp, void *buf, dp 648 arch/sparc/kernel/ds.c dr_cpu_send_error(dp, cp, data); dp 664 arch/sparc/kernel/ds.c err = dr_cpu_configure(dp, cp, req_num, &mask); dp 666 arch/sparc/kernel/ds.c err = dr_cpu_unconfigure(dp, cp, req_num, &mask); dp 669 arch/sparc/kernel/ds.c dr_cpu_send_error(dp, cp, data); dp 681 arch/sparc/kernel/ds.c static void ds_pri_data(struct ds_info *dp, dp 691 arch/sparc/kernel/ds.c dp->id, rp->req_num, rp->type, len); dp 726 arch/sparc/kernel/ds.c static void ds_var_data(struct ds_info *dp, dp 747 arch/sparc/kernel/ds.c struct ds_info *dp; dp 752 arch/sparc/kernel/ds.c for (dp = ds_info_list; dp; dp = dp->next) { dp 755 arch/sparc/kernel/ds.c tmp = find_cap_by_string(dp, "var-config"); dp 762 arch/sparc/kernel/ds.c for (dp = ds_info_list; dp; dp = dp->next) { dp 765 arch/sparc/kernel/ds.c tmp = find_cap_by_string(dp, "var-config-backup"); dp 817 arch/sparc/kernel/ds.c __ds_send(dp->lp, &pkt, msg_len); dp 834 arch/sparc/kernel/ds.c dp->id, var, value, dp 878 arch/sparc/kernel/ds.c static void ds_conn_reset(struct ds_info *dp) dp 881 arch/sparc/kernel/ds.c dp->id, __builtin_return_address(0)); dp 884 arch/sparc/kernel/ds.c static int register_services(struct ds_info *dp) dp 886 arch/sparc/kernel/ds.c struct ldc_channel *lp = dp->lp; dp 889 arch/sparc/kernel/ds.c for (i = 0; i < dp->num_ds_states; i++) { dp 894 arch/sparc/kernel/ds.c struct ds_cap_state *cp = &dp->ds_states[i]; dp 922 arch/sparc/kernel/ds.c static int ds_handshake(struct ds_info *dp, struct ds_msg_tag *pkt) dp 925 arch/sparc/kernel/ds.c if (dp->hs_state == DS_HS_START) { dp 929 arch/sparc/kernel/ds.c dp->hs_state = DS_HS_DONE; dp 931 arch/sparc/kernel/ds.c return register_services(dp); dp 934 arch/sparc/kernel/ds.c if (dp->hs_state != DS_HS_DONE) dp 939 arch/sparc/kernel/ds.c struct ds_cap_state *cp = find_cap(dp, ap->handle); dp 943 arch/sparc/kernel/ds.c "handle %llx\n", dp->id, ap->handle); dp 947 arch/sparc/kernel/ds.c dp->id, cp->service_id); dp 951 arch/sparc/kernel/ds.c struct ds_cap_state *cp = find_cap(dp, np->handle); dp 956 arch/sparc/kernel/ds.c dp->id, np->handle); dp 965 arch/sparc/kernel/ds.c ds_conn_reset(dp); dp 969 arch/sparc/kernel/ds.c static void __send_ds_nack(struct ds_info *dp, u64 handle) dp 981 arch/sparc/kernel/ds.c __ds_send(dp->lp, &nack, sizeof(nack)); dp 989 arch/sparc/kernel/ds.c struct ds_info *dp; dp 1007 arch/sparc/kernel/ds.c struct ds_info *dp = qp->dp; dp 1008 arch/sparc/kernel/ds.c struct ds_cap_state *cp = find_cap(dp, dpkt->handle); dp 1014 arch/sparc/kernel/ds.c dp->id, dpkt->handle); dp 1017 arch/sparc/kernel/ds.c __send_ds_nack(dp, dpkt->handle); dp 1020 arch/sparc/kernel/ds.c cp->data(dp, cp, dpkt, req_len); dp 1047 arch/sparc/kernel/ds.c static int ds_data(struct ds_info *dp, struct ds_msg_tag *pkt, int len) dp 1054 
arch/sparc/kernel/ds.c __send_ds_nack(dp, dpkt->handle); dp 1056 arch/sparc/kernel/ds.c qp->dp = dp; dp 1064 arch/sparc/kernel/ds.c static void ds_up(struct ds_info *dp) dp 1066 arch/sparc/kernel/ds.c struct ldc_channel *lp = dp->lp; dp 1077 arch/sparc/kernel/ds.c dp->hs_state = DS_HS_START; dp 1080 arch/sparc/kernel/ds.c static void ds_reset(struct ds_info *dp) dp 1084 arch/sparc/kernel/ds.c dp->hs_state = 0; dp 1086 arch/sparc/kernel/ds.c for (i = 0; i < dp->num_ds_states; i++) { dp 1087 arch/sparc/kernel/ds.c struct ds_cap_state *cp = &dp->ds_states[i]; dp 1095 arch/sparc/kernel/ds.c struct ds_info *dp = arg; dp 1096 arch/sparc/kernel/ds.c struct ldc_channel *lp = dp->lp; dp 1103 arch/sparc/kernel/ds.c ds_up(dp); dp 1109 arch/sparc/kernel/ds.c ds_reset(dp); dp 1116 arch/sparc/kernel/ds.c dp->id, event); dp 1125 arch/sparc/kernel/ds.c err = ldc_read(lp, dp->rcv_buf, sizeof(*tag)); dp 1129 arch/sparc/kernel/ds.c ds_conn_reset(dp); dp 1135 arch/sparc/kernel/ds.c tag = dp->rcv_buf; dp 1140 arch/sparc/kernel/ds.c ds_conn_reset(dp); dp 1147 arch/sparc/kernel/ds.c err = ds_handshake(dp, dp->rcv_buf); dp 1149 arch/sparc/kernel/ds.c err = ds_data(dp, dp->rcv_buf, dp 1168 arch/sparc/kernel/ds.c struct ds_info *dp; dp 1175 arch/sparc/kernel/ds.c dp = kzalloc(sizeof(*dp), GFP_KERNEL); dp 1177 arch/sparc/kernel/ds.c if (!dp) dp 1183 arch/sparc/kernel/ds.c dp->id = *val; dp 1186 arch/sparc/kernel/ds.c dp->rcv_buf = kzalloc(4096, GFP_KERNEL); dp 1187 arch/sparc/kernel/ds.c if (!dp->rcv_buf) dp 1190 arch/sparc/kernel/ds.c dp->rcv_buf_len = 4096; dp 1192 arch/sparc/kernel/ds.c dp->ds_states = kmemdup(ds_states_template, dp 1194 arch/sparc/kernel/ds.c if (!dp->ds_states) dp 1197 arch/sparc/kernel/ds.c dp->num_ds_states = ARRAY_SIZE(ds_states_template); dp 1199 arch/sparc/kernel/ds.c for (i = 0; i < dp->num_ds_states; i++) dp 1200 arch/sparc/kernel/ds.c dp->ds_states[i].handle = ((u64)i << 32); dp 1205 arch/sparc/kernel/ds.c lp = ldc_alloc(vdev->channel_id, &ds_cfg, dp, "DS"); dp 1210 arch/sparc/kernel/ds.c dp->lp = lp; dp 1217 arch/sparc/kernel/ds.c dp->next = ds_info_list; dp 1218 arch/sparc/kernel/ds.c ds_info_list = dp; dp 1224 arch/sparc/kernel/ds.c ldc_free(dp->lp); dp 1227 arch/sparc/kernel/ds.c kfree(dp->ds_states); dp 1230 arch/sparc/kernel/ds.c kfree(dp->rcv_buf); dp 1233 arch/sparc/kernel/ds.c kfree(dp); dp 911 arch/sparc/kernel/irq_64.c struct device_node *dp; dp 915 arch/sparc/kernel/irq_64.c dp = of_find_node_by_path("/"); dp 916 arch/sparc/kernel/irq_64.c dp = dp->child; dp 917 arch/sparc/kernel/irq_64.c while (dp) { dp 918 arch/sparc/kernel/irq_64.c if (of_node_name_eq(dp, "counter-timer")) dp 920 arch/sparc/kernel/irq_64.c dp = dp->sibling; dp 926 arch/sparc/kernel/irq_64.c if (!dp) { dp 932 arch/sparc/kernel/irq_64.c addr = of_get_property(dp, "address", NULL); dp 278 arch/sparc/kernel/of_device_32.c struct device_node *dp = op->dev.of_node; dp 302 arch/sparc/kernel/of_device_32.c dp = pp; dp 303 arch/sparc/kernel/of_device_32.c pp = dp->parent; dp 310 arch/sparc/kernel/of_device_32.c pbus->count_cells(dp, &pna, &pns); dp 312 arch/sparc/kernel/of_device_32.c if (build_one_resource(dp, dbus, pbus, addr, dp 340 arch/sparc/kernel/of_device_32.c static struct platform_device * __init scan_one_device(struct device_node *dp, dp 354 arch/sparc/kernel/of_device_32.c op->dev.of_node = dp; dp 356 arch/sparc/kernel/of_device_32.c intr = of_get_property(dp, "intr", &len); dp 364 arch/sparc/kernel/of_device_32.c of_get_property(dp, "interrupts", &len); dp 383 arch/sparc/kernel/of_device_32.c 
dev_set_name(&op->dev, "%08x", dp->phandle); dp 389 arch/sparc/kernel/of_device_32.c printk("%pOF: Could not register of device.\n", dp); dp 397 arch/sparc/kernel/of_device_32.c static void __init scan_tree(struct device_node *dp, struct device *parent) dp 399 arch/sparc/kernel/of_device_32.c while (dp) { dp 400 arch/sparc/kernel/of_device_32.c struct platform_device *op = scan_one_device(dp, parent); dp 403 arch/sparc/kernel/of_device_32.c scan_tree(dp->child, &op->dev); dp 405 arch/sparc/kernel/of_device_32.c dp = dp->sibling; dp 356 arch/sparc/kernel/of_device_64.c struct device_node *dp = op->dev.of_node; dp 379 arch/sparc/kernel/of_device_64.c dp = pp; dp 380 arch/sparc/kernel/of_device_64.c pp = dp->parent; dp 387 arch/sparc/kernel/of_device_64.c pbus->count_cells(dp, &pna, &pns); dp 389 arch/sparc/kernel/of_device_64.c if (build_one_resource(dp, dbus, pbus, addr, dp 421 arch/sparc/kernel/of_device_64.c apply_interrupt_map(struct device_node *dp, struct device_node *pp, dp 433 arch/sparc/kernel/of_device_64.c bus->count_cells(dp, &na, NULL); dp 435 arch/sparc/kernel/of_device_64.c reg = of_get_property(dp, "reg", &num_reg); dp 480 arch/sparc/kernel/of_device_64.c static unsigned int __init pci_irq_swizzle(struct device_node *dp, dp 490 arch/sparc/kernel/of_device_64.c regs = of_get_property(dp, "reg", NULL); dp 538 arch/sparc/kernel/of_device_64.c struct device_node *dp = op->dev.of_node; dp 546 arch/sparc/kernel/of_device_64.c if (dp->irq_trans) { dp 547 arch/sparc/kernel/of_device_64.c irq = dp->irq_trans->irq_build(dp, irq, dp 548 arch/sparc/kernel/of_device_64.c dp->irq_trans->data); dp 552 arch/sparc/kernel/of_device_64.c dp, orig_irq, irq); dp 565 arch/sparc/kernel/of_device_64.c pp = dp->parent; dp 577 arch/sparc/kernel/of_device_64.c iret = apply_interrupt_map(dp, pp, dp 597 arch/sparc/kernel/of_device_64.c irq = pci_irq_swizzle(dp, pp, irq); dp 612 arch/sparc/kernel/of_device_64.c dp = pp; dp 625 arch/sparc/kernel/of_device_64.c nid = of_node_to_nid(dp); dp 636 arch/sparc/kernel/of_device_64.c static struct platform_device * __init scan_one_device(struct device_node *dp, dp 650 arch/sparc/kernel/of_device_64.c op->dev.of_node = dp; dp 652 arch/sparc/kernel/of_device_64.c irq = of_get_property(dp, "interrupts", &len); dp 660 arch/sparc/kernel/of_device_64.c dp, op->archdata.num_irqs, PROMINTR_MAX); dp 677 arch/sparc/kernel/of_device_64.c dev_set_name(&op->dev, "%08x", dp->phandle); dp 682 arch/sparc/kernel/of_device_64.c printk("%pOF: Could not register of device.\n", dp); dp 690 arch/sparc/kernel/of_device_64.c static void __init scan_tree(struct device_node *dp, struct device *parent) dp 692 arch/sparc/kernel/of_device_64.c while (dp) { dp 693 arch/sparc/kernel/of_device_64.c struct platform_device *op = scan_one_device(dp, parent); dp 696 arch/sparc/kernel/of_device_64.c scan_tree(dp->child, &op->dev); dp 698 arch/sparc/kernel/of_device_64.c dp = dp->sibling; dp 61 arch/sparc/kernel/of_device_common.c struct device_node *dp; dp 63 arch/sparc/kernel/of_device_common.c for (dp = bus_dp->child; dp; dp = dp->sibling) { dp 64 arch/sparc/kernel/of_device_common.c struct platform_device *op = of_find_device_by_node(dp); dp 71 arch/sparc/kernel/of_device_common.c if (dp->child) dp 76 arch/sparc/kernel/of_device_common.c static void get_cells(struct device_node *dp, int *addrc, int *sizec) dp 79 arch/sparc/kernel/of_device_common.c *addrc = of_n_addr_cells(dp); dp 81 arch/sparc/kernel/of_device_common.c *sizec = of_n_size_cells(dp); dp 152 arch/sparc/kernel/of_device_common.c 
struct device_node *dp = np; dp 154 arch/sparc/kernel/of_device_common.c while (dp) { dp 155 arch/sparc/kernel/of_device_common.c if (of_node_name_eq(dp, "sbus") || dp 156 arch/sparc/kernel/of_device_common.c of_node_name_eq(dp, "sbi")) dp 164 arch/sparc/kernel/of_device_common.c if (of_find_property(dp, "ranges", NULL) != NULL) dp 167 arch/sparc/kernel/of_device_common.c dp = dp->parent; dp 591 arch/sparc/kernel/pci.c struct device_node *dp; dp 594 arch/sparc/kernel/pci.c dp = pdev->dev.of_node; dp 596 arch/sparc/kernel/pci.c return snprintf (buf, PAGE_SIZE, "%pOF\n", dp); dp 417 arch/sparc/kernel/pci_fire.c struct device_node *dp = op->dev.of_node; dp 429 arch/sparc/kernel/pci_fire.c pbm->name = dp->full_name; dp 431 arch/sparc/kernel/pci_fire.c regs = of_get_property(dp, "reg", NULL); dp 461 arch/sparc/kernel/pci_fire.c struct device_node *dp = op->dev.of_node; dp 467 arch/sparc/kernel/pci_fire.c portid = of_getintprop_default(dp, "portid", 0xff); dp 511 arch/sparc/kernel/pci_psycho.c struct device_node *dp = op->dev.of_node; dp 517 arch/sparc/kernel/pci_psycho.c upa_portid = of_getintprop_default(dp, "upa-portid", 0xff); dp 540 arch/sparc/kernel/pci_psycho.c pr_regs = of_get_property(dp, "reg", NULL); dp 315 arch/sparc/kernel/pci_sabre.c struct device_node *dp = pbm->op->dev.of_node; dp 322 arch/sparc/kernel/pci_sabre.c dp = dp->parent; dp 324 arch/sparc/kernel/pci_sabre.c op = of_find_device_by_node(dp); dp 461 arch/sparc/kernel/pci_sabre.c struct device_node *dp = op->dev.of_node; dp 498 arch/sparc/kernel/pci_sabre.c upa_portid = of_getintprop_default(dp, "upa-portid", 0xff); dp 506 arch/sparc/kernel/pci_sabre.c pr_regs = of_get_property(dp, "reg", NULL); dp 536 arch/sparc/kernel/pci_sabre.c vdma = of_get_property(dp, "virtual-dma", NULL); dp 1316 arch/sparc/kernel/pci_schizo.c struct device_node *dp = op->dev.of_node; dp 1346 arch/sparc/kernel/pci_schizo.c regs = of_get_property(dp, "reg", NULL); dp 1362 arch/sparc/kernel/pci_schizo.c pbm->chip_version = of_getintprop_default(dp, "version#", 0); dp 1363 arch/sparc/kernel/pci_schizo.c pbm->chip_revision = of_getintprop_default(dp, "module-version#", 0); dp 1371 arch/sparc/kernel/pci_schizo.c pbm->name = dp->full_name; dp 1417 arch/sparc/kernel/pci_schizo.c struct device_node *dp = op->dev.of_node; dp 1423 arch/sparc/kernel/pci_schizo.c portid = of_getintprop_default(dp, "portid", 0xff); dp 700 arch/sparc/kernel/pci_sun4v.c struct device_node *dp; dp 702 arch/sparc/kernel/pci_sun4v.c dp = pbm->op->dev.of_node; dp 703 arch/sparc/kernel/pci_sun4v.c prop = of_find_property(dp, "66mhz-capable", NULL); dp 1173 arch/sparc/kernel/pci_sun4v.c struct device_node *dp = op->dev.of_node; dp 1176 arch/sparc/kernel/pci_sun4v.c pbm->numa_node = of_node_to_nid(dp); dp 1187 arch/sparc/kernel/pci_sun4v.c pbm->name = dp->full_name; dp 1227 arch/sparc/kernel/pci_sun4v.c struct device_node *dp; dp 1234 arch/sparc/kernel/pci_sun4v.c dp = op->dev.of_node; dp 1268 arch/sparc/kernel/pci_sun4v.c regs = of_get_property(dp, "reg", NULL); dp 27 arch/sparc/kernel/power.c static int has_button_interrupt(unsigned int irq, struct device_node *dp) dp 31 arch/sparc/kernel/power.c if (!of_find_property(dp, "button", NULL)) dp 57 arch/sparc/kernel/prom_32.c static void __init sparc32_path_component(struct device_node *dp, char *tmp_buf) dp 59 arch/sparc/kernel/prom_32.c const char *name = of_get_property(dp, "name", NULL); dp 63 arch/sparc/kernel/prom_32.c rprop = of_find_property(dp, "reg", NULL); dp 74 arch/sparc/kernel/prom_32.c static void __init 
sbus_path_component(struct device_node *dp, char *tmp_buf) dp 76 arch/sparc/kernel/prom_32.c const char *name = of_get_property(dp, "name", NULL); dp 80 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "reg", NULL); dp 92 arch/sparc/kernel/prom_32.c static void __init pci_path_component(struct device_node *dp, char *tmp_buf) dp 94 arch/sparc/kernel/prom_32.c const char *name = of_get_property(dp, "name", NULL); dp 99 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "reg", NULL); dp 118 arch/sparc/kernel/prom_32.c static void __init ebus_path_component(struct device_node *dp, char *tmp_buf) dp 120 arch/sparc/kernel/prom_32.c const char *name = of_get_property(dp, "name", NULL); dp 124 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "reg", NULL); dp 136 arch/sparc/kernel/prom_32.c static void __init ambapp_path_component(struct device_node *dp, char *tmp_buf) dp 138 arch/sparc/kernel/prom_32.c const char *name = of_get_property(dp, "name", NULL); dp 147 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "reg", NULL); dp 149 arch/sparc/kernel/prom_32.c reg0 = (unsigned int)dp->phandle; dp 156 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "interrupts", NULL); dp 162 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "vendor", NULL); dp 166 arch/sparc/kernel/prom_32.c prop = of_find_property(dp, "device", NULL); dp 176 arch/sparc/kernel/prom_32.c static void __init __build_path_component(struct device_node *dp, char *tmp_buf) dp 178 arch/sparc/kernel/prom_32.c struct device_node *parent = dp->parent; dp 183 arch/sparc/kernel/prom_32.c return pci_path_component(dp, tmp_buf); dp 185 arch/sparc/kernel/prom_32.c return sbus_path_component(dp, tmp_buf); dp 187 arch/sparc/kernel/prom_32.c return ebus_path_component(dp, tmp_buf); dp 189 arch/sparc/kernel/prom_32.c return ambapp_path_component(dp, tmp_buf); dp 195 arch/sparc/kernel/prom_32.c return sparc32_path_component(dp, tmp_buf); dp 198 arch/sparc/kernel/prom_32.c char * __init build_path_component(struct device_node *dp) dp 200 arch/sparc/kernel/prom_32.c const char *name = of_get_property(dp, "name", NULL); dp 204 arch/sparc/kernel/prom_32.c __build_path_component(dp, tmp_buf); dp 219 arch/sparc/kernel/prom_32.c struct device_node *dp; dp 250 arch/sparc/kernel/prom_32.c for_each_node_by_type(dp, type) { dp 254 arch/sparc/kernel/prom_32.c if (!dp) { dp 258 arch/sparc/kernel/prom_32.c of_console_device = dp; dp 260 arch/sparc/kernel/prom_32.c sprintf(of_console_path, "%pOF", dp); dp 282 arch/sparc/kernel/prom_32.c dp = of_find_node_by_phandle(node); dp 284 arch/sparc/kernel/prom_32.c if (!of_node_is_type(dp, "display") && dp 285 arch/sparc/kernel/prom_32.c !of_node_is_type(dp, "serial")) { dp 291 arch/sparc/kernel/prom_32.c of_console_device = dp; dp 294 arch/sparc/kernel/prom_32.c sprintf(of_console_path, "%pOF", dp); dp 306 arch/sparc/kernel/prom_32.c dp = of_find_node_by_path("/"); dp 307 arch/sparc/kernel/prom_32.c path = of_get_property(dp, "stdout-path", NULL); dp 331 arch/sparc/kernel/prom_32.c void __init irq_trans_init(struct device_node *dp) dp 66 arch/sparc/kernel/prom_64.c static void __init sun4v_path_component(struct device_node *dp, char *tmp_buf) dp 68 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 73 arch/sparc/kernel/prom_64.c rprop = of_find_property(dp, "reg", NULL); dp 78 arch/sparc/kernel/prom_64.c if (!of_node_is_root(dp->parent)) { dp 108 arch/sparc/kernel/prom_64.c static void __init sun4u_path_component(struct device_node *dp, char *tmp_buf) 
dp 110 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 114 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 119 arch/sparc/kernel/prom_64.c if (!of_node_is_root(dp->parent)) { dp 127 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "upa-portid", NULL); dp 129 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "portid", NULL); dp 144 arch/sparc/kernel/prom_64.c static void __init sbus_path_component(struct device_node *dp, char *tmp_buf) dp 146 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 150 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 162 arch/sparc/kernel/prom_64.c static void __init pci_path_component(struct device_node *dp, char *tmp_buf) dp 164 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 169 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 188 arch/sparc/kernel/prom_64.c static void __init upa_path_component(struct device_node *dp, char *tmp_buf) dp 190 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 194 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 200 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "upa-portid", NULL); dp 211 arch/sparc/kernel/prom_64.c static void __init vdev_path_component(struct device_node *dp, char *tmp_buf) dp 213 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 217 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 227 arch/sparc/kernel/prom_64.c static void __init ebus_path_component(struct device_node *dp, char *tmp_buf) dp 229 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 233 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 246 arch/sparc/kernel/prom_64.c static void __init i2c_path_component(struct device_node *dp, char *tmp_buf) dp 248 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 252 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 266 arch/sparc/kernel/prom_64.c static void __init usb_path_component(struct device_node *dp, char *tmp_buf) dp 268 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 272 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 288 arch/sparc/kernel/prom_64.c static void __init ieee1394_path_component(struct device_node *dp, char *tmp_buf) dp 290 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 294 arch/sparc/kernel/prom_64.c prop = of_find_property(dp, "reg", NULL); dp 309 arch/sparc/kernel/prom_64.c static void __init __build_path_component(struct device_node *dp, char *tmp_buf) dp 311 arch/sparc/kernel/prom_64.c struct device_node *parent = dp->parent; dp 316 arch/sparc/kernel/prom_64.c pci_path_component(dp, tmp_buf); dp 320 arch/sparc/kernel/prom_64.c sbus_path_component(dp, tmp_buf); dp 324 arch/sparc/kernel/prom_64.c upa_path_component(dp, tmp_buf); dp 328 arch/sparc/kernel/prom_64.c ebus_path_component(dp, tmp_buf); dp 333 arch/sparc/kernel/prom_64.c usb_path_component(dp, tmp_buf); dp 337 arch/sparc/kernel/prom_64.c i2c_path_component(dp, tmp_buf); dp 341 arch/sparc/kernel/prom_64.c ieee1394_path_component(dp, tmp_buf); dp 345 arch/sparc/kernel/prom_64.c vdev_path_component(dp, tmp_buf); dp 353 arch/sparc/kernel/prom_64.c sun4v_path_component(dp, tmp_buf); dp 356 
arch/sparc/kernel/prom_64.c sun4u_path_component(dp, tmp_buf); dp 360 arch/sparc/kernel/prom_64.c char * __init build_path_component(struct device_node *dp) dp 362 arch/sparc/kernel/prom_64.c const char *name = of_get_property(dp, "name", NULL); dp 366 arch/sparc/kernel/prom_64.c __build_path_component(dp, tmp_buf); dp 436 arch/sparc/kernel/prom_64.c struct device_node *dp; dp 440 arch/sparc/kernel/prom_64.c for_each_node_by_type(dp, "cpu") { dp 441 arch/sparc/kernel/prom_64.c int cpuid = of_getintprop_default(dp, mid_prop, -1); dp 447 arch/sparc/kernel/prom_64.c cpuid = of_getintprop_default(dp, this_mid_prop, -1); dp 462 arch/sparc/kernel/prom_64.c ret = func(dp, cpuid, arg); dp 469 arch/sparc/kernel/prom_64.c static void *check_cpu_node(struct device_node *dp, int cpuid, int id) dp 472 arch/sparc/kernel/prom_64.c return dp; dp 481 arch/sparc/kernel/prom_64.c static void *record_one_cpu(struct device_node *dp, int cpuid, int arg) dp 500 arch/sparc/kernel/prom_64.c static void *fill_in_one_cpu(struct device_node *dp, int cpuid, int arg) dp 505 arch/sparc/kernel/prom_64.c if (of_find_property(dp, "cpuid", NULL)) { dp 508 arch/sparc/kernel/prom_64.c portid_parent = dp; dp 531 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "clock-frequency", 0); dp 535 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "l1-dcache-size", dp 538 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "l1-dcache-line-size", dp 541 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "l1-icache-size", dp 544 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "l1-icache-line-size", dp 547 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "l2-cache-size", 0); dp 549 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "l2-cache-line-size", 0); dp 565 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "dcache-size", 16 * 1024); dp 567 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "dcache-line-size", 32); dp 570 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "icache-size", 16 * 1024); dp 572 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "icache-line-size", 32); dp 575 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "ecache-size", dp 578 arch/sparc/kernel/prom_64.c of_getintprop_default(dp, "ecache-line-size", 64); dp 600 arch/sparc/kernel/prom_64.c struct device_node *dp; dp 622 arch/sparc/kernel/prom_64.c dp = of_find_node_by_phandle(node); dp 624 arch/sparc/kernel/prom_64.c if (!of_node_is_type(dp, "display") && !of_node_is_type(dp, "serial")) { dp 630 arch/sparc/kernel/prom_64.c of_console_device = dp; dp 50 arch/sparc/kernel/prom_common.c int of_set_property(struct device_node *dp, const char *name, void *val, int len) dp 65 arch/sparc/kernel/prom_common.c prevp = &dp->properties; dp 73 arch/sparc/kernel/prom_common.c ret = prom_setprop(dp->phandle, name, val, len); dp 43 arch/sparc/kernel/prom_irqtrans.c static unsigned int psycho_irq_build(struct device_node *dp, dp 73 arch/sparc/kernel/prom_irqtrans.c static void __init psycho_irq_trans_init(struct device_node *dp) dp 77 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 78 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = psycho_irq_build; dp 80 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 81 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = (void *) regs[2].phys_addr; dp 182 arch/sparc/kernel/prom_irqtrans.c static int sabre_device_needs_wsync(struct device_node *dp) dp 184 arch/sparc/kernel/prom_irqtrans.c 
struct device_node *parent = dp->parent; dp 221 arch/sparc/kernel/prom_irqtrans.c static unsigned int sabre_irq_build(struct device_node *dp, dp 258 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 259 arch/sparc/kernel/prom_irqtrans.c if (regs && sabre_device_needs_wsync(dp)) { dp 269 arch/sparc/kernel/prom_irqtrans.c static void __init sabre_irq_trans_init(struct device_node *dp) dp 275 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 276 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = sabre_irq_build; dp 280 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 283 arch/sparc/kernel/prom_irqtrans.c busrange = of_get_property(dp, "bus-range", NULL); dp 286 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = irq_data; dp 378 arch/sparc/kernel/prom_irqtrans.c static unsigned int schizo_irq_build(struct device_node *dp, dp 426 arch/sparc/kernel/prom_irqtrans.c static void __init __schizo_irq_trans_init(struct device_node *dp, dp 432 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 433 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = schizo_irq_build; dp 437 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 438 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = irq_data; dp 445 arch/sparc/kernel/prom_irqtrans.c irq_data->portid = of_getintprop_default(dp, "portid", 0); dp 446 arch/sparc/kernel/prom_irqtrans.c irq_data->chip_version = of_getintprop_default(dp, "version#", 0); dp 449 arch/sparc/kernel/prom_irqtrans.c static void __init schizo_irq_trans_init(struct device_node *dp) dp 451 arch/sparc/kernel/prom_irqtrans.c __schizo_irq_trans_init(dp, 0); dp 454 arch/sparc/kernel/prom_irqtrans.c static void __init tomatillo_irq_trans_init(struct device_node *dp) dp 456 arch/sparc/kernel/prom_irqtrans.c __schizo_irq_trans_init(dp, 1); dp 459 arch/sparc/kernel/prom_irqtrans.c static unsigned int pci_sun4v_irq_build(struct device_node *dp, dp 468 arch/sparc/kernel/prom_irqtrans.c static void __init pci_sun4v_irq_trans_init(struct device_node *dp) dp 472 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 473 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = pci_sun4v_irq_build; dp 475 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 476 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = (void *) (unsigned long) dp 510 arch/sparc/kernel/prom_irqtrans.c static unsigned int fire_irq_build(struct device_node *dp, dp 542 arch/sparc/kernel/prom_irqtrans.c static void __init fire_irq_trans_init(struct device_node *dp) dp 547 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 548 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = fire_irq_build; dp 552 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 553 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = irq_data; dp 556 arch/sparc/kernel/prom_irqtrans.c irq_data->portid = of_getintprop_default(dp, "portid", 0); dp 648 arch/sparc/kernel/prom_irqtrans.c static unsigned int sbus_of_build_irq(struct device_node *dp, dp 660 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 705 arch/sparc/kernel/prom_irqtrans.c static void __init sbus_irq_trans_init(struct device_node *dp) dp 709 arch/sparc/kernel/prom_irqtrans.c 
dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 710 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = sbus_of_build_irq; dp 712 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 713 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = (void *) (unsigned long) regs->phys_addr; dp 718 arch/sparc/kernel/prom_irqtrans.c static unsigned int central_build_irq(struct device_node *dp, dp 728 arch/sparc/kernel/prom_irqtrans.c if (of_node_name_eq(dp, "eeprom")) { dp 730 arch/sparc/kernel/prom_irqtrans.c } else if (of_node_name_eq(dp, "zs")) { dp 732 arch/sparc/kernel/prom_irqtrans.c } else if (of_node_name_eq(dp, "clock-board")) { dp 752 arch/sparc/kernel/prom_irqtrans.c static void __init central_irq_trans_init(struct device_node *dp) dp 754 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 755 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = central_build_irq; dp 757 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = dp; dp 783 arch/sparc/kernel/prom_irqtrans.c static unsigned int sun4v_vdev_irq_build(struct device_node *dp, dp 792 arch/sparc/kernel/prom_irqtrans.c static void __init sun4v_vdev_irq_trans_init(struct device_node *dp) dp 796 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans = prom_early_alloc(sizeof(struct of_irq_controller)); dp 797 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->irq_build = sun4v_vdev_irq_build; dp 799 arch/sparc/kernel/prom_irqtrans.c regs = of_get_property(dp, "reg", NULL); dp 800 arch/sparc/kernel/prom_irqtrans.c dp->irq_trans->data = (void *) (unsigned long) dp 804 arch/sparc/kernel/prom_irqtrans.c void __init irq_trans_init(struct device_node *dp) dp 812 arch/sparc/kernel/prom_irqtrans.c model = of_get_property(dp, "model", NULL); dp 814 arch/sparc/kernel/prom_irqtrans.c model = of_get_property(dp, "compatible", NULL); dp 820 arch/sparc/kernel/prom_irqtrans.c t->init(dp); dp 827 arch/sparc/kernel/prom_irqtrans.c if (of_node_name_eq(dp, "sbus") || dp 828 arch/sparc/kernel/prom_irqtrans.c of_node_name_eq(dp, "sbi")) { dp 829 arch/sparc/kernel/prom_irqtrans.c sbus_irq_trans_init(dp); dp 833 arch/sparc/kernel/prom_irqtrans.c if (of_node_name_eq(dp, "fhc") && dp 834 arch/sparc/kernel/prom_irqtrans.c of_node_name_eq(dp->parent, "central")) { dp 835 arch/sparc/kernel/prom_irqtrans.c central_irq_trans_init(dp); dp 838 arch/sparc/kernel/prom_irqtrans.c if (of_node_name_eq(dp, "virtual-devices") || dp 839 arch/sparc/kernel/prom_irqtrans.c of_node_name_eq(dp, "niu")) { dp 840 arch/sparc/kernel/prom_irqtrans.c sun4v_vdev_irq_trans_init(dp); dp 455 arch/sparc/kernel/psycho_common.c struct device_node *dp = op->dev.of_node; dp 457 arch/sparc/kernel/psycho_common.c pbm->name = dp->full_name; dp 460 arch/sparc/kernel/psycho_common.c pbm->chip_version = of_getintprop_default(dp, "version#", 0); dp 461 arch/sparc/kernel/psycho_common.c pbm->chip_revision = of_getintprop_default(dp, "module-revision#", 0); dp 543 arch/sparc/kernel/sbus.c struct device_node *dp = op->dev.of_node; dp 550 arch/sparc/kernel/sbus.c pr = of_get_property(dp, "reg", NULL); dp 666 arch/sparc/kernel/sbus.c struct device_node *dp; dp 668 arch/sparc/kernel/sbus.c for_each_node_by_name(dp, "sbus") { dp 669 arch/sparc/kernel/sbus.c struct platform_device *op = of_find_device_by_node(dp); dp 375 arch/sparc/kernel/smp_64.c struct device_node *dp = of_find_node_by_cpuid(cpu); dp 377 arch/sparc/kernel/smp_64.c prom_startcpu(dp->phandle, entry, cookie); dp 252 
arch/sparc/kernel/sun4d_irq.c struct device_node *dp; dp 258 arch/sparc/kernel/sun4d_irq.c for_each_node_by_name(dp, "sbi") { dp 259 arch/sparc/kernel/sun4d_irq.c int devid = of_getintprop_default(dp, "device-id", 0); dp 260 arch/sparc/kernel/sun4d_irq.c int board = of_getintprop_default(dp, "board#", 0); dp 327 arch/sparc/kernel/sun4d_irq.c struct device_node *dp = op->dev.of_node; dp 328 arch/sparc/kernel/sun4d_irq.c struct device_node *board_parent, *bus = dp->parent; dp 353 arch/sparc/kernel/sun4d_irq.c regs = of_get_property(dp, "reg", NULL); dp 420 arch/sparc/kernel/sun4d_irq.c struct device_node *dp; dp 427 arch/sparc/kernel/sun4d_irq.c dp = of_find_node_by_name(NULL, "cpu-unit"); dp 428 arch/sparc/kernel/sun4d_irq.c if (!dp) { dp 437 arch/sparc/kernel/sun4d_irq.c reg = of_get_property(dp, "reg", NULL); dp 443 arch/sparc/kernel/sun4d_irq.c board = of_getintprop_default(dp, "board#", -1); dp 449 arch/sparc/kernel/sun4d_irq.c of_node_put(dp); dp 486 arch/sparc/kernel/sun4d_irq.c struct device_node *dp; dp 490 arch/sparc/kernel/sun4d_irq.c for_each_node_by_name(dp, "sbi") { dp 491 arch/sparc/kernel/sun4d_irq.c int devid = of_getintprop_default(dp, "device-id", 0); dp 492 arch/sparc/kernel/sun4d_irq.c int board = of_getintprop_default(dp, "board#", 0); dp 361 arch/sparc/kernel/sun4m_irq.c struct device_node *dp = of_find_node_by_name(NULL, "counter"); dp 366 arch/sparc/kernel/sun4m_irq.c if (!dp) { dp 371 arch/sparc/kernel/sun4m_irq.c addr = of_get_property(dp, "address", &len); dp 372 arch/sparc/kernel/sun4m_irq.c of_node_put(dp); dp 438 arch/sparc/kernel/sun4m_irq.c struct device_node *dp = of_find_node_by_name(NULL, "interrupt"); dp 442 arch/sparc/kernel/sun4m_irq.c if (!dp) { dp 447 arch/sparc/kernel/sun4m_irq.c addr = of_get_property(dp, "address", &len); dp 448 arch/sparc/kernel/sun4m_irq.c of_node_put(dp); dp 273 arch/sparc/kernel/time_32.c struct device_node *dp = op->dev.of_node; dp 274 arch/sparc/kernel/time_32.c const char *model = of_get_property(dp, "model", NULL); dp 280 arch/sparc/kernel/time_32.c if (!of_find_property(dp, "address", NULL)) dp 559 arch/sparc/kernel/time_64.c struct device_node *dp = op->dev.of_node; dp 564 arch/sparc/kernel/time_64.c if (of_node_name_eq(dp->parent, "fhc") && dp 565 arch/sparc/kernel/time_64.c !of_node_name_eq(dp->parent->parent, "central")) dp 569 arch/sparc/kernel/time_64.c dp, op->resource[0].start); dp 191 arch/sparc/kernel/vio.c struct device_node *dp; dp 194 arch/sparc/kernel/vio.c dp = vdev->dp; dp 196 arch/sparc/kernel/vio.c return snprintf (buf, PAGE_SIZE, "%pOF\n", dp); dp 297 arch/sparc/kernel/vio.c struct device_node *dp; dp 367 arch/sparc/kernel/vio.c dp = cdev_node; dp 369 arch/sparc/kernel/vio.c for_each_child_of_node(cdev_node, dp) { dp 370 arch/sparc/kernel/vio.c if (of_node_is_type(dp, type)) dp 374 arch/sparc/kernel/vio.c dp = to_vio_dev(parent)->dp; dp 376 arch/sparc/kernel/vio.c vdev->dp = dp; dp 406 arch/sparc/kernel/vio.c if (vdev->dp) dp 1184 arch/sparc/mm/init_64.c int of_node_to_nid(struct device_node *dp) dp 1199 arch/sparc/mm/init_64.c regs = of_get_property(dp, "reg", NULL); dp 78 arch/sparc/mm/io-unit.c struct device_node *dp; dp 80 arch/sparc/mm/io-unit.c for_each_node_by_name(dp, "sbi") { dp 81 arch/sparc/mm/io-unit.c struct platform_device *op = of_find_device_by_node(dp); dp 136 arch/sparc/mm/iommu.c struct device_node *dp; dp 138 arch/sparc/mm/iommu.c for_each_node_by_name(dp, "iommu") { dp 139 arch/sparc/mm/iommu.c struct platform_device *op = of_find_device_by_node(dp); dp 250 arch/sparc/vdso/vma.c 
struct page *dp, **dpp = NULL; dp 296 arch/sparc/vdso/vma.c dp = alloc_page(GFP_KERNEL); dp 297 arch/sparc/vdso/vma.c if (!dp) dp 300 arch/sparc/vdso/vma.c dpp[0] = dp; dp 301 arch/sparc/vdso/vma.c vvar_data = page_address(dp); dp 785 arch/x86/kernel/cpu/cacheinfo.c unsigned char *dp = (unsigned char *)regs; dp 804 arch/x86/kernel/cpu/cacheinfo.c unsigned char des = dp[j]; dp 324 block/genhd.c struct blk_major_name *dp; dp 327 block/genhd.c for (dp = major_names[major_to_index(offset)]; dp; dp = dp->next) dp 328 block/genhd.c if (dp->major == offset) dp 329 block/genhd.c seq_printf(seqf, "%3d %s\n", dp->major, dp->name); dp 189 crypto/async_tx/async_raid6_recov.c struct page *p, *q, *g, *dp, *dq; dp 218 crypto/async_tx/async_raid6_recov.c dp = blocks[faila]; dp 222 crypto/async_tx/async_raid6_recov.c tx = async_memcpy(dp, g, 0, 0, bytes, submit); dp 227 crypto/async_tx/async_raid6_recov.c srcs[0] = dp; dp 231 crypto/async_tx/async_raid6_recov.c tx = async_xor(dp, srcs, 0, 2, bytes, submit); dp 241 crypto/async_tx/async_raid6_recov.c srcs[0] = dp; dp 249 crypto/async_tx/async_raid6_recov.c srcs[0] = dp; dp 253 crypto/async_tx/async_raid6_recov.c tx = async_xor(dp, srcs, 0, 2, bytes, submit); dp 263 crypto/async_tx/async_raid6_recov.c struct page *p, *q, *dp, *dq; dp 278 crypto/async_tx/async_raid6_recov.c dp = blocks[faila]; dp 280 crypto/async_tx/async_raid6_recov.c blocks[disks-2] = dp; dp 289 crypto/async_tx/async_raid6_recov.c blocks[faila] = dp; dp 295 crypto/async_tx/async_raid6_recov.c srcs[0] = dp; dp 299 crypto/async_tx/async_raid6_recov.c tx = async_xor(dp, srcs, 0, 2, bytes, submit); dp 309 crypto/async_tx/async_raid6_recov.c srcs[0] = dp; dp 317 crypto/async_tx/async_raid6_recov.c srcs[0] = dp; dp 321 crypto/async_tx/async_raid6_recov.c tx = async_xor(dp, srcs, 0, 2, bytes, submit); dp 115 crypto/rsa_helper.c key->dp = value; dp 110 drivers/ata/pata_atp867x.c struct atp867x_priv *dp = ap->private_data; dp 122 drivers/ata/pata_atp867x.c if (dp->pci66mhz && mode > ATP867X_IO_DMAMODE_UDMA_0 && dp 127 drivers/ata/pata_atp867x.c b = ioread8(dp->dma_mode); dp 135 drivers/ata/pata_atp867x.c iowrite8(b, dp->dma_mode); dp 141 drivers/ata/pata_atp867x.c struct atp867x_priv *dp = ap->private_data; dp 148 drivers/ata/pata_atp867x.c if (dp->pci66mhz) dp 205 drivers/ata/pata_atp867x.c struct atp867x_priv *dp = ap->private_data; dp 220 drivers/ata/pata_atp867x.c b = ioread8(dp->dma_mode); dp 225 drivers/ata/pata_atp867x.c iowrite8(b, dp->dma_mode); dp 231 drivers/ata/pata_atp867x.c iowrite8(b, dp->slave_piospd); dp 233 drivers/ata/pata_atp867x.c iowrite8(b, dp->mstr_piospd); dp 238 drivers/ata/pata_atp867x.c iowrite8(b, dp->eightb_piospd); dp 291 drivers/ata/pata_atp867x.c struct atp867x_priv *dp = ap->private_data; dp 329 drivers/ata/pata_atp867x.c (unsigned long long)dp->dma_mode, dp 330 drivers/ata/pata_atp867x.c (unsigned long long)dp->mstr_piospd, dp 331 drivers/ata/pata_atp867x.c (unsigned long long)dp->slave_piospd, dp 332 drivers/ata/pata_atp867x.c (unsigned long long)dp->eightb_piospd, dp 333 drivers/ata/pata_atp867x.c (unsigned long)dp->pci66mhz); dp 340 drivers/ata/pata_atp867x.c struct atp867x_priv *dp; dp 343 drivers/ata/pata_atp867x.c dp = ap->private_data = dp 344 drivers/ata/pata_atp867x.c devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL); dp 345 drivers/ata/pata_atp867x.c if (dp == NULL) dp 348 drivers/ata/pata_atp867x.c dp->dma_mode = ATP867X_IO_DMAMODE(ap, port); dp 349 drivers/ata/pata_atp867x.c dp->mstr_piospd = ATP867X_IO_MSTRPIOSPD(ap, port); dp 350 
drivers/ata/pata_atp867x.c dp->slave_piospd = ATP867X_IO_SLAVPIOSPD(ap, port); dp 351 drivers/ata/pata_atp867x.c dp->eightb_piospd = ATP867X_IO_8BPIOSPD(ap, port); dp 353 drivers/ata/pata_atp867x.c dp->pci66mhz = dp 484 drivers/block/drbd/drbd_debugfs.c static void drbd_debugfs_remove(struct dentry **dp) dp 486 drivers/block/drbd/drbd_debugfs.c debugfs_remove(*dp); dp 487 drivers/block/drbd/drbd_debugfs.c *dp = NULL; dp 1103 drivers/block/drbd/drbd_int.h struct p_data *dp, int data_size); dp 1366 drivers/block/drbd/drbd_main.c struct p_data *dp, int data_size) dp 1370 drivers/block/drbd/drbd_main.c _drbd_send_ack(peer_device, cmd, dp->sector, cpu_to_be32(data_size), dp 1371 drivers/block/drbd/drbd_main.c dp->block_id); dp 3418 drivers/block/floppy.c struct floppy_drive_params dp; dp 3532 drivers/block/floppy.c if (!valid_floppy_drive_params(inparam.dp.autodetect, dp 3533 drivers/block/floppy.c inparam.dp.native_format)) dp 3535 drivers/block/floppy.c *UDP = inparam.dp; dp 864 drivers/char/rtc.c struct device_node *dp; dp 865 drivers/char/rtc.c for_each_child_of_node(ebus_dp, dp) { dp 866 drivers/char/rtc.c if (of_node_name_eq(dp, "rtc")) { dp 867 drivers/char/rtc.c op = of_find_device_by_node(dp); dp 555 drivers/crypto/caam/caampkc.c pdb->dp_dma = dma_map_single(dev, key->dp, p_sz, DMA_TO_DEVICE); dp 797 drivers/crypto/caam/caampkc.c kzfree(key->dp); dp 940 drivers/crypto/caam/caampkc.c rsa_key->dp = caam_read_rsa_crt(raw_key->dp, raw_key->dp_sz, p_sz); dp 941 drivers/crypto/caam/caampkc.c if (!rsa_key->dp) dp 960 drivers/crypto/caam/caampkc.c kzfree(rsa_key->dp); dp 75 drivers/crypto/caam/caampkc.h u8 *dp; dp 206 drivers/crypto/ccp/ccp-dev.c struct ccp_device *dp = NULL; dp 214 drivers/crypto/ccp/ccp-dev.c dp = ccp_rr; dp 224 drivers/crypto/ccp/ccp-dev.c return dp; dp 253 drivers/crypto/ccp/ccp-dev.c struct ccp_device *dp; dp 259 drivers/crypto/ccp/ccp-dev.c dp = list_first_entry(&ccp_units, struct ccp_device, entry); dp 260 drivers/crypto/ccp/ccp-dev.c ret = dp->vdata->version; dp 83 drivers/crypto/qat/qat_common/qat_asym_algs.c dma_addr_t dp; dp 109 drivers/crypto/qat/qat_common/qat_asym_algs.c char *dp; dp 858 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->in.rsa.dec_crt.dp = ctx->dma_dp; dp 1097 drivers/crypto/qat/qat_common/qat_asym_algs.c ptr = rsa_key->dp; dp 1102 drivers/crypto/qat/qat_common/qat_asym_algs.c ctx->dp = dma_alloc_coherent(dev, half_key_sz, &ctx->dma_dp, dp 1104 drivers/crypto/qat/qat_common/qat_asym_algs.c if (!ctx->dp) dp 1106 drivers/crypto/qat/qat_common/qat_asym_algs.c memcpy(ctx->dp + (half_key_sz - len), ptr, len); dp 1140 drivers/crypto/qat/qat_common/qat_asym_algs.c memset(ctx->dp, '\0', half_key_sz); dp 1141 drivers/crypto/qat/qat_common/qat_asym_algs.c dma_free_coherent(dev, half_key_sz, ctx->dp, ctx->dma_dp); dp 1142 drivers/crypto/qat/qat_common/qat_asym_algs.c ctx->dp = NULL; dp 1176 drivers/crypto/qat/qat_common/qat_asym_algs.c if (ctx->dp) { dp 1177 drivers/crypto/qat/qat_common/qat_asym_algs.c memset(ctx->dp, '\0', half_key_sz); dp 1178 drivers/crypto/qat/qat_common/qat_asym_algs.c dma_free_coherent(dev, half_key_sz, ctx->dp, ctx->dma_dp); dp 1194 drivers/crypto/qat/qat_common/qat_asym_algs.c ctx->dp = NULL; dp 30 drivers/extcon/extcon-usbc-cros-ec.c bool dp; /* DisplayPort enabled */ dp 255 drivers/extcon/extcon-usbc-cros-ec.c bool dp = false; dp 282 drivers/extcon/extcon-usbc-cros-ec.c dp = pd_mux_state & USB_PD_MUX_DP_ENABLED; dp 288 drivers/extcon/extcon-usbc-cros-ec.c role, power_type, dr, pr, polarity, mux, dp, hpd); dp 299 
drivers/extcon/extcon-usbc-cros-ec.c if (force || info->dr != dr || info->pr != pr || info->dp != dp || dp 308 drivers/extcon/extcon-usbc-cros-ec.c info->dp = dp; dp 319 drivers/extcon/extcon-usbc-cros-ec.c extcon_set_state(info->edev, EXTCON_DISP_DP, dp); dp 98 drivers/gpu/drm/amd/display/dc/dce/dce_link_encoder.c (reg + enc110->offsets.dp) dp 42 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_init_dp(struct analogix_dp_device *dp) dp 46 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reset(dp); dp 48 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_swreset(dp); dp 50 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_init_analog_param(dp); dp 51 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_init_interrupt(dp); dp 54 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_sw_function(dp); dp 56 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_config_interrupt(dp); dp 57 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_init_analog_func(dp); dp 61 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_init_hpd(dp); dp 62 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_init_aux(dp); dp 66 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_detect_hpd(struct analogix_dp_device *dp) dp 71 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (analogix_dp_get_plug_in_status(dp) == 0) dp 83 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (!dp->force_hpd) dp 91 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "failed to get hpd plug status, try to force hpd\n"); dp 93 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_force_hpd(dp); dp 95 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (analogix_dp_get_plug_in_status(dp) != 0) { dp 96 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to get hpd plug in status\n"); dp 100 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "success to get plug in status after force hpd\n"); dp 105 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static bool analogix_dp_detect_sink_psr(struct analogix_dp_device *dp) dp 110 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_PSR_SUPPORT, &psr_version); dp 112 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to get PSR version, disable it\n"); dp 116 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "Panel PSR version : %x\n", psr_version); dp 120 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_enable_sink_psr(struct analogix_dp_device *dp) dp 126 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_PSR_EN_CFG, &psr_en); dp 128 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to get psr config\n"); dp 133 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_PSR_EN_CFG, psr_en); dp 135 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to disable panel psr\n"); dp 141 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_PSR_EN_CFG, psr_en); dp 143 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to set panel psr\n"); dp 149 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_PSR_EN_CFG, psr_en); dp 151 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to set panel psr\n"); dp 155 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_psr_crc(dp); dp 157 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->psr_supported = true; dp 161 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "enable psr fail, force to disable psr\n"); dp 167 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_rx_to_enhanced_mode(struct analogix_dp_device *dp, dp 173 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_LANE_COUNT_SET, &data); dp 178 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_LANE_COUNT_SET, dp 182 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_LANE_COUNT_SET, dp 188 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_is_enhanced_mode_available(struct analogix_dp_device *dp, dp 194 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_MAX_LANE_COUNT, &data); dp 205 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_set_enhanced_mode(struct analogix_dp_device *dp) dp 210 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_is_enhanced_mode_available(dp, &data); dp 214 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_enable_rx_to_enhanced_mode(dp, data); dp 218 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_enhanced_mode(dp, data); dp 223 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_training_pattern_dis(struct analogix_dp_device *dp) dp 227 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_training_pattern(dp, DP_NONE); dp 229 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 236 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_lane_pre_emphasis(struct analogix_dp_device *dp, dp 241 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane0_pre_emphasis(dp, pre_emphasis); dp 244 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane1_pre_emphasis(dp, pre_emphasis); dp 248 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane2_pre_emphasis(dp, pre_emphasis); dp 252 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane3_pre_emphasis(dp, pre_emphasis); dp 257 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_link_start(struct analogix_dp_device *dp) dp 262 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c lane_count = dp->link_train.lane_count; dp 264 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lt_state = CLOCK_RECOVERY; dp 265 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.eq_loop = 0; dp 268 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.cr_loop[lane] = 0; dp 271 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_link_bandwidth(dp, dp->link_train.link_rate); dp 272 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_count(dp, dp->link_train.lane_count); dp 275 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c buf[0] = dp->link_train.link_rate; dp 276 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c buf[1] = dp->link_train.lane_count; dp 277 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_write(&dp->aux, DP_LINK_BW_SET, buf, 
2); dp 281 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = analogix_dp_set_enhanced_mode(dp); dp 283 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to set enhance mode\n"); dp 289 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_lane_pre_emphasis(dp, dp 294 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c while (analogix_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) { dp 296 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "Wait for PLL lock timed out\n"); dp 305 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_training_pattern(dp, TRAINING_PTN1); dp 308 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 318 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, buf, dp 385 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static void analogix_dp_set_lane_link_training(struct analogix_dp_device *dp, dp 390 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane0_link_training(dp, training_lane_set); dp 393 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane1_link_training(dp, training_lane_set); dp 397 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane2_link_training(dp, training_lane_set); dp 401 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane3_link_training(dp, training_lane_set); dp 407 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_lane_link_training(struct analogix_dp_device *dp, dp 414 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c reg = analogix_dp_get_lane0_link_training(dp); dp 417 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c reg = analogix_dp_get_lane1_link_training(dp); dp 420 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c reg = analogix_dp_get_lane2_link_training(dp); dp 423 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c reg = analogix_dp_get_lane3_link_training(dp); dp 433 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static void analogix_dp_reduce_link_rate(struct analogix_dp_device *dp) dp 435 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_training_pattern_dis(dp); dp 436 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_enhanced_mode(dp); dp 438 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lt_state = FAILED; dp 441 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static void analogix_dp_get_adjust_training_lane(struct analogix_dp_device *dp, dp 447 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c lane_count = dp->link_train.lane_count; dp 461 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.training_lane[lane] = training_lane; dp 465 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_process_clock_recovery(struct analogix_dp_device *dp) dp 473 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c lane_count = dp->link_train.lane_count; dp 475 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_read(&dp->aux, DP_LANE0_1_STATUS, link_status, 2); dp 479 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_read(&dp->aux, DP_ADJUST_REQUEST_LANE0_1, dp 486 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_training_pattern(dp, TRAINING_PTN2); dp 488 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 494 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "Link Training Clock Recovery success\n"); dp 495 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lt_state = EQUALIZER_TRAINING; dp 499 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp, lane); dp 509 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.cr_loop[lane]++; dp 511 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->link_train.cr_loop[lane] == MAX_CR_LOOP || dp 514 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "CR Max reached (%d,%d,%d)\n", dp 515 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.cr_loop[lane], dp 517 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reduce_link_rate(dp); dp 523 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_adjust_training_lane(dp, adjust_request); dp 526 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_link_training(dp, dp 527 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.training_lane[lane], lane); dp 529 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, dp 530 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.training_lane, lane_count); dp 537 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_process_equalizer_training(struct analogix_dp_device *dp) dp 545 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c lane_count = dp->link_train.lane_count; dp 547 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_read(&dp->aux, DP_LANE0_1_STATUS, link_status, 2); dp 552 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reduce_link_rate(dp); dp 556 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_read(&dp->aux, DP_ADJUST_REQUEST_LANE0_1, dp 561 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_readb(&dp->aux, DP_LANE_ALIGN_STATUS_UPDATED, dp 566 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_adjust_training_lane(dp, adjust_request); dp 570 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = analogix_dp_training_pattern_dis(dp); dp 574 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "Link Training success!\n"); dp 575 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_link_bandwidth(dp, ®); dp 576 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.link_rate = reg; dp 577 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "final bandwidth = %.2x\n", dp 578 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.link_rate); dp 580 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_lane_count(dp, ®); dp 581 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count = reg; dp 582 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "final lane count = %.2x\n", dp 583 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count); dp 585 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lt_state = FINISHED; dp 591 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.eq_loop++; dp 593 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->link_train.eq_loop > MAX_EQ_LOOP) { dp 594 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "EQ Max loop\n"); dp 595 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reduce_link_rate(dp); dp 600 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_link_training(dp, dp 601 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.training_lane[lane], lane); dp 603 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = drm_dp_dpcd_write(&dp->aux, DP_TRAINING_LANE0_SET, dp 604 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.training_lane, lane_count); dp 611 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static void analogix_dp_get_max_rx_bandwidth(struct analogix_dp_device *dp, dp 622 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c drm_dp_dpcd_readb(&dp->aux, DP_MAX_LINK_RATE, &data); dp 626 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static void analogix_dp_get_max_rx_lane_count(struct analogix_dp_device *dp, dp 635 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c drm_dp_dpcd_readb(&dp->aux, DP_MAX_LANE_COUNT, &data); dp 639 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_full_link_train(struct analogix_dp_device *dp, dp 649 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reset_macro(dp); dp 652 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate); dp 653 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count); dp 655 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if ((dp->link_train.link_rate != DP_LINK_BW_1_62) && dp 656 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c (dp->link_train.link_rate != DP_LINK_BW_2_7) && dp 657 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c (dp->link_train.link_rate != DP_LINK_BW_5_4)) { dp 658 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n", dp 659 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.link_rate); dp 660 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.link_rate = DP_LINK_BW_1_62; dp 663 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->link_train.lane_count == 0) { dp 664 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n", dp 665 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count); dp 666 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count = (u8)LANE_COUNT1; dp 670 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->link_train.lane_count > max_lanes) dp 671 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count = max_lanes; dp 672 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->link_train.link_rate > max_rate) dp 673 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.link_rate = max_rate; dp 676 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_analog_power_down(dp, POWER_ALL, 0); dp 678 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lt_state = START; dp 682 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c switch (dp->link_train.lt_state) { dp 684 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = analogix_dp_link_start(dp); dp 686 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "LT link start failed!\n"); dp 689 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = analogix_dp_process_clock_recovery(dp); dp 691 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "LT CR failed!\n"); dp 694 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c retval = 
analogix_dp_process_equalizer_training(dp); dp 696 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "LT EQ failed!\n"); dp 706 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "eDP link training failed (%d)\n", retval); dp 711 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_fast_link_train(struct analogix_dp_device *dp) dp 717 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reset_macro(dp); dp 719 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_link_bandwidth(dp, dp->link_train.link_rate); dp 720 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_count(dp, dp->link_train.lane_count); dp 722 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c for (i = 0; i < dp->link_train.lane_count; i++) { dp 723 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_lane_link_training(dp, dp 724 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.training_lane[i], i); dp 727 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = readx_poll_timeout(analogix_dp_get_pll_lock_status, dp, status, dp 731 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Wait for pll lock failed %d\n", ret); dp 736 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_training_pattern(dp, TRAINING_PTN1); dp 740 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_training_pattern(dp, TRAINING_PTN2); dp 745 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_training_pattern(dp, DP_NONE); dp 752 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_LANE_ALIGN_STATUS_UPDATED, dp 755 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Read align status failed %d\n", dp 760 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_read(&dp->aux, DP_LANE0_1_STATUS, link_status, dp 763 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Read link status failed %d\n", dp 769 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count)) { dp 770 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Clock recovery failed\n"); dp 771 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reduce_link_rate(dp); dp 776 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->link_train.lane_count)) { dp 777 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Channel EQ failed\n"); dp 778 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_reduce_link_rate(dp); dp 786 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_train_link(struct analogix_dp_device *dp) dp 788 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->fast_train_enable) dp 789 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return analogix_dp_fast_link_train(dp); dp 791 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return analogix_dp_full_link_train(dp, dp->video_info.max_lane_count, dp 792 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->video_info.max_link_rate); dp 795 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_config_video(struct analogix_dp_device *dp) dp 800 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_config_video_slave_mode(dp); dp 802 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_video_color_format(dp); dp 804 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if 
(analogix_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) { dp 805 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "PLL is not locked yet.\n"); dp 811 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (analogix_dp_is_slave_video_stream_clock_on(dp) == 0) dp 814 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "Timeout of slave video streamclk ok\n"); dp 821 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0); dp 824 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE); dp 827 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_video_mute(dp, 0); dp 830 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_video_master(dp, 0); dp 833 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_start_video(dp); dp 839 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (analogix_dp_is_video_stream_on(dp) == 0) { dp 847 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_warn(dp->dev, dp 858 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_enable_scramble(struct analogix_dp_device *dp, dp 865 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_enable_scrambling(dp); dp 867 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 871 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 874 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_disable_scrambling(dp); dp 876 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 880 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_TRAINING_PATTERN_SET, dp 888 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = arg; dp 892 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c irq_type = analogix_dp_get_irq_type(dp); dp 894 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_mute_hpd_interrupt(dp); dp 903 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = arg; dp 906 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c irq_type = analogix_dp_get_irq_type(dp); dp 909 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "Detected cable status changed!\n"); dp 910 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->drm_dev) dp 911 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c drm_helper_hpd_irq_event(dp->drm_dev); dp 915 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_clear_hotplug_interrupts(dp); dp 916 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_unmute_hpd_interrupt(dp); dp 922 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_fast_link_train_detection(struct analogix_dp_device *dp) dp 927 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_MAX_DOWNSPREAD, &spread); dp 929 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to read downspread %d\n", ret); dp 932 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->fast_train_enable = !!(spread & DP_NO_AUX_HANDSHAKE_LINK_TRAINING); dp 933 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_dbg(dp->dev, "fast link training %s\n", dp 934 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->fast_train_enable ? 
"supported" : "unsupported"); dp 938 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_commit(struct analogix_dp_device *dp) dp 943 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 944 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (drm_panel_disable(dp->plat_data->panel)) dp 948 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_train_link(dp); dp 950 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "unable to do link train, ret=%d\n", ret); dp 954 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_enable_scramble(dp, 1); dp 956 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "can not enable scramble\n"); dp 960 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_init_video(dp); dp 961 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_config_video(dp); dp 963 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "unable to config video\n"); dp 968 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 969 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_panel_enable(dp->plat_data->panel); dp 977 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_fast_link_train_detection(dp); dp 981 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (analogix_dp_detect_sink_psr(dp)) { dp 982 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_enable_sink_psr(dp); dp 990 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_enable_psr(struct analogix_dp_device *dp) dp 996 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_PSR_STATUS, &sink); dp 998 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Failed to read psr status %d\n", ret); dp 1011 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_send_psr_spd(dp, &psr_vsc, true); dp 1013 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_analog_power_down(dp, POWER_ALL, true); dp 1018 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_disable_psr(struct analogix_dp_device *dp) dp 1024 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_analog_power_down(dp, POWER_ALL, false); dp 1026 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_writeb(&dp->aux, DP_SET_POWER, DP_SET_POWER_D0); dp 1028 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Failed to set DP Power0 %d\n", ret); dp 1032 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_dpcd_readb(&dp->aux, DP_PSR_STATUS, &sink); dp 1034 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Failed to read psr status %d\n", ret); dp 1037 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "sink inactive, skip disable psr"); dp 1041 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_train_link(dp); dp 1043 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c DRM_DEV_ERROR(dp->dev, "Failed to train the link %d\n", ret); dp 1057 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return analogix_dp_send_psr_spd(dp, &psr_vsc, true); dp 1073 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_prepare_panel(struct analogix_dp_device *dp, dp 1078 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (!dp->plat_data->panel) dp 1081 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c 
mutex_lock(&dp->panel_lock); dp 1087 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->panel_is_modeset && !is_modeset_prepare) dp 1091 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_panel_prepare(dp->plat_data->panel); dp 1093 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_panel_unprepare(dp->plat_data->panel); dp 1099 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->panel_is_modeset = prepare; dp 1102 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c mutex_unlock(&dp->panel_lock); dp 1108 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(connector); dp 1112 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 1113 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c num_modes += drm_panel_get_modes(dp->plat_data->panel); dp 1115 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_prepare_panel(dp, true, false); dp 1121 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_get_sync(dp->dev); dp 1122 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c edid = drm_get_edid(connector, &dp->aux.ddc); dp 1123 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_put(dp->dev); dp 1125 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c drm_connector_update_edid_property(&dp->connector, dp 1127 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c num_modes += drm_add_edid_modes(&dp->connector, edid); dp 1131 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_prepare_panel(dp, false, false); dp 1136 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->get_modes) dp 1137 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c num_modes += dp->plat_data->get_modes(dp->plat_data, connector); dp 1145 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(connector); dp 1147 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return dp->encoder; dp 1154 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(connector); dp 1171 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (crtc_state->self_refresh_active && !dp->psr_supported) dp 1186 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(connector); dp 1190 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) dp 1193 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_prepare_panel(dp, true, false); dp 1199 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (!analogix_dp_detect_hpd(dp)) dp 1202 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_prepare_panel(dp, false, false); dp 1220 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1221 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct drm_encoder *encoder = dp->encoder; dp 1230 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (!dp->plat_data->skip_connector) { dp 1231 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c connector = &dp->connector; dp 1234 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_connector_init(dp->drm_dev, connector, dp 1253 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->attach) { dp 1254 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = dp->plat_data->attach(dp->plat_data, bridge, connector); dp 1261 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 1262 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_panel_attach(dp->plat_data->panel, &dp->connector); dp 1273 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct drm_crtc *analogix_dp_get_new_crtc(struct analogix_dp_device *dp, dp 1276 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct drm_encoder *encoder = dp->encoder; dp 1294 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1299 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c crtc = analogix_dp_get_new_crtc(dp, state); dp 1308 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_prepare_panel(dp, true, true); dp 1313 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_set_bridge(struct analogix_dp_device *dp) dp 1317 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_get_sync(dp->dev); dp 1319 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = clk_prepare_enable(dp->clock); dp 1325 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->power_on_start) dp 1326 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->plat_data->power_on_start(dp->plat_data); dp 1328 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c phy_power_on(dp->phy); dp 1330 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_init_dp(dp); dp 1339 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_detect_hpd(dp); dp 1345 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_commit(dp); dp 1351 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->power_on_end) dp 1352 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->plat_data->power_on_end(dp->plat_data); dp 1354 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c enable_irq(dp->irq); dp 1358 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c phy_power_off(dp->phy); dp 1359 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->power_off) dp 1360 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->plat_data->power_off(dp->plat_data); dp 1361 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c clk_disable_unprepare(dp->clock); dp 1363 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_put_sync(dp->dev); dp 1371 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1377 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c crtc = analogix_dp_get_new_crtc(dp, state); dp 1384 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_disable_psr(dp); dp 1390 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->dpms_mode == DRM_MODE_DPMS_ON) dp 1394 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (analogix_dp_set_bridge(dp) == 0) { dp 1395 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->dpms_mode = DRM_MODE_DPMS_ON; dp 1398 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "failed to set bridge, retry: %d\n", dp 1403 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "too many times retry set bridge, give it up\n"); dp 1408 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1411 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->dpms_mode != DRM_MODE_DPMS_ON) dp 1414 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 1415 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (drm_panel_disable(dp->plat_data->panel)) { dp 1421 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.c disable_irq(dp->irq); dp 1423 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->power_off) dp 1424 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->plat_data->power_off(dp->plat_data); dp 1426 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_set_analog_power_down(dp, POWER_ALL, 1); dp 1427 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c phy_power_off(dp->phy); dp 1429 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c clk_disable_unprepare(dp->clock); dp 1431 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_put_sync(dp->dev); dp 1433 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_prepare_panel(dp, false, true); dp 1437 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->fast_train_enable = false; dp 1438 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->psr_supported = false; dp 1439 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->dpms_mode = DRM_MODE_DPMS_OFF; dp 1445 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1449 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c crtc = analogix_dp_get_new_crtc(dp, state); dp 1469 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1474 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c crtc = analogix_dp_get_new_crtc(dp, state); dp 1482 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_enable_psr(dp); dp 1491 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = bridge->driver_private; dp 1492 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct drm_display_info *display_info = &dp->connector.display_info; dp 1493 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct video_info *video = &dp->video_info; dp 1494 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct device_node *dp_node = dp->dev->of_node; dp 1574 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp) dp 1585 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->bridge = bridge; dp 1587 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c bridge->driver_private = dp; dp 1590 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_bridge_attach(dp->encoder, bridge, NULL); dp 1599 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c static int analogix_dp_dt_parse_pdata(struct analogix_dp_device *dp) dp 1601 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct device_node *dp_node = dp->dev->of_node; dp 1602 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct video_info *video_info = &dp->video_info; dp 1604 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c switch (dp->plat_data->dev_type) { dp 1632 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(aux); dp 1634 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return analogix_dp_transfer(dp, msg); dp 1641 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp; dp 1651 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp = devm_kzalloc(dev, sizeof(struct analogix_dp_device), GFP_KERNEL); dp 1652 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (!dp) dp 1655 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->dev = &pdev->dev; dp 1656 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->dpms_mode = DRM_MODE_DPMS_OFF; dp 1658 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c 
mutex_init(&dp->panel_lock); dp 1659 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->panel_is_modeset = false; dp 1666 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->plat_data = plat_data; dp 1668 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_dt_parse_pdata(dp); dp 1672 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->phy = devm_phy_get(dp->dev, "dp"); dp 1673 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (IS_ERR(dp->phy)) { dp 1674 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dev_err(dp->dev, "no DP phy configured\n"); dp 1675 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = PTR_ERR(dp->phy); dp 1682 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->phy = NULL; dp 1688 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->clock = devm_clk_get(&pdev->dev, "dp"); dp 1689 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (IS_ERR(dp->clock)) { dp 1691 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return ERR_CAST(dp->clock); dp 1694 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c clk_prepare_enable(dp->clock); dp 1698 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->reg_base = devm_ioremap_resource(&pdev->dev, res); dp 1699 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (IS_ERR(dp->reg_base)) dp 1700 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return ERR_CAST(dp->reg_base); dp 1702 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->force_hpd = of_property_read_bool(dev->of_node, "force-hpd"); dp 1705 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->hpd_gpiod = devm_gpiod_get_optional(dev, "hpd", GPIOD_IN); dp 1706 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (!dp->hpd_gpiod) dp 1707 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->hpd_gpiod = devm_gpiod_get_optional(dev, "samsung,hpd", dp 1709 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (IS_ERR(dp->hpd_gpiod)) { dp 1711 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c PTR_ERR(dp->hpd_gpiod)); dp 1712 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return ERR_CAST(dp->hpd_gpiod); dp 1715 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->hpd_gpiod) { dp 1723 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->irq = gpiod_to_irq(dp->hpd_gpiod); dp 1726 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->irq = platform_get_irq(pdev, 0); dp 1730 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->irq == -ENXIO) { dp 1735 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = devm_request_threaded_irq(&pdev->dev, dp->irq, dp 1738 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c irq_flags, "analogix-dp", dp); dp 1743 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c disable_irq(dp->irq); dp 1745 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return dp; dp 1749 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c int analogix_dp_bind(struct analogix_dp_device *dp, struct drm_device *drm_dev) dp 1753 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->drm_dev = drm_dev; dp 1754 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->encoder = dp->plat_data->encoder; dp 1756 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->aux.name = "DP-AUX"; dp 1757 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->aux.transfer = analogix_dpaux_transfer; dp 1758 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->aux.dev = dp->dev; dp 1760 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = drm_dp_aux_register(&dp->aux); dp 1764 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_enable(dp->dev); dp 1766 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = analogix_dp_create_bridge(drm_dev, dp); dp 1775 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_disable(dp->dev); dp 1781 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c void analogix_dp_unbind(struct analogix_dp_device *dp) dp 1783 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c analogix_dp_bridge_disable(dp->bridge); dp 1784 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c dp->connector.funcs->destroy(&dp->connector); dp 1786 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 1787 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (drm_panel_unprepare(dp->plat_data->panel)) dp 1789 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c drm_panel_detach(dp->plat_data->panel); dp 1792 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c drm_dp_aux_unregister(&dp->aux); dp 1793 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c pm_runtime_disable(dp->dev); dp 1797 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c void analogix_dp_remove(struct analogix_dp_device *dp) dp 1799 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c clk_disable_unprepare(dp->clock); dp 1804 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c int analogix_dp_suspend(struct analogix_dp_device *dp) dp 1806 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c clk_disable_unprepare(dp->clock); dp 1808 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 1809 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (drm_panel_unprepare(dp->plat_data->panel)) dp 1817 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c int analogix_dp_resume(struct analogix_dp_device *dp) dp 1821 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c ret = clk_prepare_enable(dp->clock); dp 1827 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (dp->plat_data->panel) { dp 1828 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c if (drm_panel_prepare(dp->plat_data->panel)) { dp 1841 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(connector); dp 1849 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return drm_dp_start_crc(&dp->aux, connector->state->crtc); dp 1855 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c struct analogix_dp_device *dp = to_dp(connector); dp 1857 drivers/gpu/drm/bridge/analogix/analogix_dp_core.c return drm_dp_stop_crc(&dp->aux); dp 184 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_enable_video_mute(struct analogix_dp_device *dp, bool enable); dp 185 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_stop_video(struct analogix_dp_device *dp); dp 186 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_lane_swap(struct analogix_dp_device *dp, bool enable); dp 187 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_init_analog_param(struct analogix_dp_device *dp); dp 188 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_init_interrupt(struct analogix_dp_device *dp); dp 189 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_reset(struct analogix_dp_device *dp); dp 190 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_swreset(struct analogix_dp_device *dp); dp 191 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_config_interrupt(struct analogix_dp_device *dp); dp 192 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void 
analogix_dp_mute_hpd_interrupt(struct analogix_dp_device *dp); dp 193 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_unmute_hpd_interrupt(struct analogix_dp_device *dp); dp 194 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h enum pll_status analogix_dp_get_pll_lock_status(struct analogix_dp_device *dp); dp 195 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_pll_power_down(struct analogix_dp_device *dp, bool enable); dp 196 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_analog_power_down(struct analogix_dp_device *dp, dp 199 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h int analogix_dp_init_analog_func(struct analogix_dp_device *dp); dp 200 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_init_hpd(struct analogix_dp_device *dp); dp 201 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_force_hpd(struct analogix_dp_device *dp); dp 202 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h enum dp_irq_type analogix_dp_get_irq_type(struct analogix_dp_device *dp); dp 203 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_clear_hotplug_interrupts(struct analogix_dp_device *dp); dp 204 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_reset_aux(struct analogix_dp_device *dp); dp 205 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_init_aux(struct analogix_dp_device *dp); dp 206 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h int analogix_dp_get_plug_in_status(struct analogix_dp_device *dp); dp 207 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_enable_sw_function(struct analogix_dp_device *dp); dp 208 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_link_bandwidth(struct analogix_dp_device *dp, u32 bwtype); dp 209 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_get_link_bandwidth(struct analogix_dp_device *dp, u32 *bwtype); dp 210 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane_count(struct analogix_dp_device *dp, u32 count); dp 211 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_get_lane_count(struct analogix_dp_device *dp, u32 *count); dp 212 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_enable_enhanced_mode(struct analogix_dp_device *dp, dp 214 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_training_pattern(struct analogix_dp_device *dp, dp 216 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane0_pre_emphasis(struct analogix_dp_device *dp, dp 218 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane1_pre_emphasis(struct analogix_dp_device *dp, dp 220 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane2_pre_emphasis(struct analogix_dp_device *dp, dp 222 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane3_pre_emphasis(struct analogix_dp_device *dp, dp 224 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane0_link_training(struct analogix_dp_device *dp, dp 226 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane1_link_training(struct analogix_dp_device *dp, dp 228 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane2_link_training(struct analogix_dp_device *dp, dp 230 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_lane3_link_training(struct analogix_dp_device *dp, dp 232 
drivers/gpu/drm/bridge/analogix/analogix_dp_core.h u32 analogix_dp_get_lane0_link_training(struct analogix_dp_device *dp); dp 233 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h u32 analogix_dp_get_lane1_link_training(struct analogix_dp_device *dp); dp 234 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h u32 analogix_dp_get_lane2_link_training(struct analogix_dp_device *dp); dp 235 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h u32 analogix_dp_get_lane3_link_training(struct analogix_dp_device *dp); dp 236 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_reset_macro(struct analogix_dp_device *dp); dp 237 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_init_video(struct analogix_dp_device *dp); dp 239 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_video_color_format(struct analogix_dp_device *dp); dp 240 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h int analogix_dp_is_slave_video_stream_clock_on(struct analogix_dp_device *dp); dp 241 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_video_cr_mn(struct analogix_dp_device *dp, dp 245 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_set_video_timing_mode(struct analogix_dp_device *dp, u32 type); dp 246 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_enable_video_master(struct analogix_dp_device *dp, dp 248 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_start_video(struct analogix_dp_device *dp); dp 249 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h int analogix_dp_is_video_stream_on(struct analogix_dp_device *dp); dp 250 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_config_video_slave_mode(struct analogix_dp_device *dp); dp 251 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_enable_scrambling(struct analogix_dp_device *dp); dp 252 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_disable_scrambling(struct analogix_dp_device *dp); dp 253 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h void analogix_dp_enable_psr_crc(struct analogix_dp_device *dp); dp 254 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h int analogix_dp_send_psr_spd(struct analogix_dp_device *dp, dp 256 drivers/gpu/drm/bridge/analogix/analogix_dp_core.h ssize_t analogix_dp_transfer(struct analogix_dp_device *dp, dp 26 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_enable_video_mute(struct analogix_dp_device *dp, bool enable) dp 31 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 33 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 35 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 37 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 41 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_stop_video(struct analogix_dp_device *dp) dp 45 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 47 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 50 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_lane_swap(struct analogix_dp_device *dp, bool enable) dp 61 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LANE_MAP); dp 64 
drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_init_analog_param(struct analogix_dp_device *dp) dp 69 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_ANALOG_CTL_1); dp 72 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_ANALOG_CTL_2); dp 74 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) { dp 76 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data->dev_type == RK3288_DP) dp 79 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_PLL_REG_1); dp 80 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x95, dp->reg_base + ANALOGIX_DP_PLL_REG_2); dp 81 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x40, dp->reg_base + ANALOGIX_DP_PLL_REG_3); dp 82 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x58, dp->reg_base + ANALOGIX_DP_PLL_REG_4); dp 83 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x22, dp->reg_base + ANALOGIX_DP_PLL_REG_5); dp 87 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_ANALOG_CTL_3); dp 91 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_PLL_FILTER_CTL_1); dp 95 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TX_AMP_TUNING_CTL); dp 98 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_init_interrupt(struct analogix_dp_device *dp) dp 101 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(INT_POL1 | INT_POL0, dp->reg_base + ANALOGIX_DP_INT_CTL); dp 104 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0xff, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_1); dp 105 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x4f, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_2); dp 106 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0xe0, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_3); dp 107 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0xe7, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_4); dp 108 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x63, dp->reg_base + ANALOGIX_DP_INT_STA); dp 111 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_1); dp 112 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_2); dp 113 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_3); dp 114 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_4); dp 115 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_INT_STA_MASK); dp 118 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_reset(struct analogix_dp_device *dp) dp 122 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_stop_video(dp); dp 123 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_enable_video_mute(dp, 0); dp 125 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) dp 133 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_1); dp 138 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 142 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_lane_swap(dp, 0); dp 144 
drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_SYS_CTL_1); dp 145 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x40, dp->reg_base + ANALOGIX_DP_SYS_CTL_2); dp 146 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 147 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 149 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 150 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_HDCP_CTL); dp 152 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x5e, dp->reg_base + ANALOGIX_DP_HPD_DEGLITCH_L); dp 153 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x1a, dp->reg_base + ANALOGIX_DP_HPD_DEGLITCH_H); dp 155 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x10, dp->reg_base + ANALOGIX_DP_LINK_DEBUG_CTL); dp 157 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_PHY_TEST); dp 159 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x0, dp->reg_base + ANALOGIX_DP_VIDEO_FIFO_THRD); dp 160 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x20, dp->reg_base + ANALOGIX_DP_AUDIO_MARGIN); dp 162 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x4, dp->reg_base + ANALOGIX_DP_M_VID_GEN_FILTER_TH); dp 163 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x2, dp->reg_base + ANALOGIX_DP_M_AUD_GEN_FILTER_TH); dp 165 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00000101, dp->reg_base + ANALOGIX_DP_SOC_GENERAL_CTL); dp 168 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_swreset(struct analogix_dp_device *dp) dp 170 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(RESET_DP_TX, dp->reg_base + ANALOGIX_DP_TX_SW_RESET); dp 173 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_config_interrupt(struct analogix_dp_device *dp) dp 179 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_1); dp 182 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_2); dp 185 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_3); dp 188 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_4); dp 191 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_INT_STA_MASK); dp 194 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_mute_hpd_interrupt(struct analogix_dp_device *dp) dp 199 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_4); dp 201 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_4); dp 203 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_INT_STA_MASK); dp 205 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_INT_STA_MASK); dp 208 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_unmute_hpd_interrupt(struct analogix_dp_device *dp) dp 214 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_MASK_4); dp 217 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_INT_STA_MASK); dp 220 
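The analogix_dp_reg.c helpers listed above (analogix_dp_enable_video_mute(), analogix_dp_mute_hpd_interrupt(), analogix_dp_reset() and the rest) share one idiom: read a control register at dp->reg_base + offset, flip a bit, and write the value back. A minimal sketch of that read-modify-write pattern, assuming a hypothetical VIDEO_MUTE_BIT rather than the driver's real define:

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/types.h>

#define VIDEO_MUTE_BIT	BIT(5)	/* placeholder bit position, not the driver's macro */

static void example_set_video_mute(void __iomem *reg_base,
				   unsigned long ctl_offset, bool enable)
{
	u32 reg = readl(reg_base + ctl_offset);	/* fetch the current register value */

	if (enable)
		reg |= VIDEO_MUTE_BIT;		/* set the mute bit */
	else
		reg &= ~VIDEO_MUTE_BIT;		/* clear it */

	writel(reg, reg_base + ctl_offset);	/* write the modified value back */
}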
drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c enum pll_status analogix_dp_get_pll_lock_status(struct analogix_dp_device *dp) dp 224 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_DEBUG_CTL); dp 231 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_pll_power_down(struct analogix_dp_device *dp, bool enable) dp 237 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) { dp 242 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + pd_addr); dp 247 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + pd_addr); dp 250 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_analog_power_down(struct analogix_dp_device *dp, dp 258 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) dp 263 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) dp 268 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + phy_pd_addr); dp 273 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 277 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + phy_pd_addr); dp 283 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 287 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + phy_pd_addr); dp 293 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 297 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + phy_pd_addr); dp 303 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 307 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + phy_pd_addr); dp 313 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 321 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) dp 326 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + phy_pd_addr); dp 332 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 333 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) dp 339 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 342 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 345 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + phy_pd_addr); dp 348 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + phy_pd_addr); dp 356 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_init_analog_func(struct analogix_dp_device *dp) dp 361 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_set_analog_power_down(dp, POWER_ALL, 0); dp 364 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_1); dp 366 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_DEBUG_CTL); dp 368 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_DEBUG_CTL); dp 371 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (analogix_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) { dp 372 
drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_set_pll_power_down(dp, 0); dp 374 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c while (analogix_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) { dp 377 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_err(dp->dev, "failed to get pll lock status\n"); dp 385 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 388 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 392 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_clear_hotplug_interrupts(struct analogix_dp_device *dp) dp 396 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->hpd_gpiod) dp 400 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_4); dp 403 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_INT_STA); dp 406 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_init_hpd(struct analogix_dp_device *dp) dp 410 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->hpd_gpiod) dp 413 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_clear_hotplug_interrupts(dp); dp 415 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 417 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 420 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_force_hpd(struct analogix_dp_device *dp) dp 424 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 426 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 429 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c enum dp_irq_type analogix_dp_get_irq_type(struct analogix_dp_device *dp) dp 433 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->hpd_gpiod) { dp 434 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = gpiod_get_value(dp->hpd_gpiod); dp 441 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_4); dp 456 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_reset_aux(struct analogix_dp_device *dp) dp 461 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 463 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 466 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_init_aux(struct analogix_dp_device *dp) dp 472 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_INT_STA); dp 474 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_set_analog_power_down(dp, AUX_BLOCK, true); dp 476 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_set_analog_power_down(dp, AUX_BLOCK, false); dp 478 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_reset_aux(dp); dp 481 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) dp 490 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_HW_RETRY_CTL); dp 494 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_CH_DEFER_CTL); dp 497 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 499 
drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_2); dp 502 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_get_plug_in_status(struct analogix_dp_device *dp) dp 506 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->hpd_gpiod) { dp 507 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (gpiod_get_value(dp->hpd_gpiod)) dp 510 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 518 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_enable_sw_function(struct analogix_dp_device *dp) dp 522 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_FUNC_EN_1); dp 524 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_1); dp 527 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_start_aux_transaction(struct analogix_dp_device *dp) dp 534 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_AUX_CH_CTL_2); dp 536 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_CH_CTL_2); dp 539 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_INT_STA); dp 543 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_err(dp->dev, "AUX CH command reply failed!\n"); dp 546 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_INT_STA); dp 551 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(RPLY_RECEIV, dp->reg_base + ANALOGIX_DP_INT_STA); dp 554 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_INT_STA); dp 556 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(AUX_ERR, dp->reg_base + ANALOGIX_DP_INT_STA); dp 561 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_AUX_CH_STA); dp 563 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_err(dp->dev, "AUX CH error happens: %d\n\n", dp 571 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_write_byte_to_dpcd(struct analogix_dp_device *dp, dp 582 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_BUFFER_DATA_CTL); dp 586 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_ADDR_7_0); dp 588 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_ADDR_15_8); dp 590 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_ADDR_19_16); dp 594 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_BUF_DATA_0); dp 602 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_CH_CTL_1); dp 605 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c retval = analogix_dp_start_aux_transaction(dp); dp 609 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_dbg(dp->dev, "%s: Aux Transaction fail!\n", __func__); dp 615 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_link_bandwidth(struct analogix_dp_device *dp, u32 bwtype) dp 621 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LINK_BW_SET); dp 624 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_get_link_bandwidth(struct analogix_dp_device *dp, u32 *bwtype) dp 628 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + 
ANALOGIX_DP_LINK_BW_SET); dp 632 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane_count(struct analogix_dp_device *dp, u32 count) dp 637 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LANE_COUNT_SET); dp 640 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_get_lane_count(struct analogix_dp_device *dp, u32 *count) dp 644 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_LANE_COUNT_SET); dp 648 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_enable_enhanced_mode(struct analogix_dp_device *dp, dp 654 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 656 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 658 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 660 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 664 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_training_pattern(struct analogix_dp_device *dp, dp 672 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 676 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 680 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 684 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 690 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 697 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane0_pre_emphasis(struct analogix_dp_device *dp, dp 702 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_LN0_LINK_TRAINING_CTL); dp 705 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN0_LINK_TRAINING_CTL); dp 708 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane1_pre_emphasis(struct analogix_dp_device *dp, dp 713 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_LN1_LINK_TRAINING_CTL); dp 716 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN1_LINK_TRAINING_CTL); dp 719 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane2_pre_emphasis(struct analogix_dp_device *dp, dp 724 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_LN2_LINK_TRAINING_CTL); dp 727 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN2_LINK_TRAINING_CTL); dp 730 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane3_pre_emphasis(struct analogix_dp_device *dp, dp 735 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_LN3_LINK_TRAINING_CTL); dp 738 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN3_LINK_TRAINING_CTL); dp 741 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane0_link_training(struct analogix_dp_device *dp, dp 747 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN0_LINK_TRAINING_CTL); dp 750 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c 
void analogix_dp_set_lane1_link_training(struct analogix_dp_device *dp, dp 756 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN1_LINK_TRAINING_CTL); dp 759 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane2_link_training(struct analogix_dp_device *dp, dp 765 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN2_LINK_TRAINING_CTL); dp 768 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_lane3_link_training(struct analogix_dp_device *dp, dp 774 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_LN3_LINK_TRAINING_CTL); dp 777 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c u32 analogix_dp_get_lane0_link_training(struct analogix_dp_device *dp) dp 779 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c return readl(dp->reg_base + ANALOGIX_DP_LN0_LINK_TRAINING_CTL); dp 782 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c u32 analogix_dp_get_lane1_link_training(struct analogix_dp_device *dp) dp 784 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c return readl(dp->reg_base + ANALOGIX_DP_LN1_LINK_TRAINING_CTL); dp 787 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c u32 analogix_dp_get_lane2_link_training(struct analogix_dp_device *dp) dp 789 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c return readl(dp->reg_base + ANALOGIX_DP_LN2_LINK_TRAINING_CTL); dp 792 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c u32 analogix_dp_get_lane3_link_training(struct analogix_dp_device *dp) dp 794 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c return readl(dp->reg_base + ANALOGIX_DP_LN3_LINK_TRAINING_CTL); dp 797 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_reset_macro(struct analogix_dp_device *dp) dp 801 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_PHY_TEST); dp 803 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_PHY_TEST); dp 809 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_PHY_TEST); dp 812 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_init_video(struct analogix_dp_device *dp) dp 817 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_COMMON_INT_STA_1); dp 820 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_1); dp 823 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_2); dp 826 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 829 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_8); dp 832 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_video_color_format(struct analogix_dp_device *dp) dp 837 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = (dp->video_info.dynamic_range << IN_D_RANGE_SHIFT) | dp 838 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c (dp->video_info.color_depth << IN_BPC_SHIFT) | dp 839 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c (dp->video_info.color_space << IN_COLOR_F_SHIFT); dp 840 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_2); dp 843 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_3); dp 845 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->video_info.ycbcr_coeff) 
dp 849 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_3); dp 852 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_is_slave_video_stream_clock_on(struct analogix_dp_device *dp) dp 856 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_1); dp 857 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_1); dp 859 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_1); dp 862 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_dbg(dp->dev, "Input stream clock not detected.\n"); dp 866 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_2); dp 867 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_2); dp 869 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_2); dp 870 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_dbg(dp->dev, "wait SYS_CTL_2.\n"); dp 873 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_dbg(dp->dev, "Input stream clk is changing\n"); dp 880 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_video_cr_mn(struct analogix_dp_device *dp, dp 887 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 889 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 891 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_M_VID_0); dp 893 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_M_VID_1); dp 895 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_M_VID_2); dp 898 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_N_VID_0); dp 900 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_N_VID_1); dp 902 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_N_VID_2); dp 904 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 906 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_4); dp 908 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_N_VID_0); dp 909 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x80, dp->reg_base + ANALOGIX_DP_N_VID_1); dp 910 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_N_VID_2); dp 914 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_set_video_timing_mode(struct analogix_dp_device *dp, u32 type) dp 919 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 921 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 923 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 925 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 929 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_enable_video_master(struct analogix_dp_device *dp, bool enable) dp 934 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SOC_GENERAL_CTL); 
dp 937 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SOC_GENERAL_CTL); dp 939 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SOC_GENERAL_CTL); dp 942 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SOC_GENERAL_CTL); dp 946 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_start_video(struct analogix_dp_device *dp) dp 950 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 952 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_1); dp 955 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_is_video_stream_on(struct analogix_dp_device *dp) dp 959 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 960 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 962 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_SYS_CTL_3); dp 964 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_dbg(dp->dev, "Input video stream is not detected.\n"); dp 971 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_config_video_slave_mode(struct analogix_dp_device *dp) dp 975 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_FUNC_EN_1); dp 976 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c if (dp->plat_data && is_rockchip(dp->plat_data->dev_type)) { dp 982 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_FUNC_EN_1); dp 984 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 986 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg |= (dp->video_info.interlaced << 2); dp 987 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 989 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 991 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg |= (dp->video_info.v_sync_polarity << 1); dp 992 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 994 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 996 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg |= (dp->video_info.h_sync_polarity << 0); dp 997 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_10); dp 1000 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_SOC_GENERAL_CTL); dp 1003 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_enable_scrambling(struct analogix_dp_device *dp) dp 1007 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 1009 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 1012 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_disable_scrambling(struct analogix_dp_device *dp) dp 1016 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 1018 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_TRAINING_PTN_SET); dp 1021 
drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c void analogix_dp_enable_psr_crc(struct analogix_dp_device *dp) dp 1023 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(PSR_VID_CRC_ENABLE, dp->reg_base + ANALOGIX_DP_CRC_CON); dp 1026 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c static ssize_t analogix_dp_get_psr_status(struct analogix_dp_device *dp) dp 1031 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c val = drm_dp_dpcd_readb(&dp->aux, DP_PSR_STATUS, &status); dp 1033 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_err(dp->dev, "PSR_STATUS read failed ret=%zd", val); dp 1039 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c int analogix_dp_send_psr_spd(struct analogix_dp_device *dp, dp 1047 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c val = readl(dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 1049 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(val, dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 1053 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dp->reg_base + ANALOGIX_DP_PSR_FRAME_UPDATE_CTRL); dp 1056 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(vsc->sdp_header.HB0, dp->reg_base + ANALOGIX_DP_SPD_HB0); dp 1057 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(vsc->sdp_header.HB1, dp->reg_base + ANALOGIX_DP_SPD_HB1); dp 1058 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(vsc->sdp_header.HB2, dp->reg_base + ANALOGIX_DP_SPD_HB2); dp 1059 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(vsc->sdp_header.HB3, dp->reg_base + ANALOGIX_DP_SPD_HB3); dp 1062 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x00, dp->reg_base + ANALOGIX_DP_SPD_PB0); dp 1063 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x16, dp->reg_base + ANALOGIX_DP_SPD_PB1); dp 1064 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0xCE, dp->reg_base + ANALOGIX_DP_SPD_PB2); dp 1065 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(0x5D, dp->reg_base + ANALOGIX_DP_SPD_PB3); dp 1068 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(vsc->db[0], dp->reg_base + ANALOGIX_DP_VSC_SHADOW_DB0); dp 1069 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(vsc->db[1], dp->reg_base + ANALOGIX_DP_VSC_SHADOW_DB1); dp 1072 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c val = readl(dp->reg_base + ANALOGIX_DP_VIDEO_CTL_3); dp 1074 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(val, dp->reg_base + ANALOGIX_DP_VIDEO_CTL_3); dp 1077 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c val = readl(dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 1079 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(val, dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 1082 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c val = readl(dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 1084 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(val, dp->reg_base + ANALOGIX_DP_PKT_SEND_CTL); dp 1089 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c ret = readx_poll_timeout(analogix_dp_get_psr_status, dp, psr_status, dp 1095 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_warn(dp->dev, "Failed to apply PSR %d\n", ret); dp 1101 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c ssize_t analogix_dp_transfer(struct analogix_dp_device *dp, dp 1117 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_BUFFER_DATA_CTL); dp 1145 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_CH_CTL_1); dp 1149 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + 
ANALOGIX_DP_AUX_ADDR_7_0); dp 1151 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_ADDR_15_8); dp 1153 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_ADDR_19_16); dp 1158 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_BUF_DATA_0 + dp 1171 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(reg, dp->reg_base + ANALOGIX_DP_AUX_CH_CTL_2); dp 1173 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c ret = readx_poll_timeout(readl, dp->reg_base + ANALOGIX_DP_AUX_CH_CTL_2, dp 1176 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_err(dp->dev, "AUX CH enable timeout!\n"); dp 1182 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c ret = readx_poll_timeout(readl, dp->reg_base + ANALOGIX_DP_INT_STA, dp 1185 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_err(dp->dev, "AUX CH cmd reply timeout!\n"); dp 1190 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(RPLY_RECEIV, dp->reg_base + ANALOGIX_DP_INT_STA); dp 1193 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_INT_STA); dp 1194 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c status_reg = readl(dp->reg_base + ANALOGIX_DP_AUX_CH_STA); dp 1196 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c writel(AUX_ERR, dp->reg_base + ANALOGIX_DP_INT_STA); dp 1198 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c dev_warn(dp->dev, "AUX CH error happened: %#x (%d)\n", dp 1205 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_BUF_DATA_0 + dp 1213 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c reg = readl(dp->reg_base + ANALOGIX_DP_AUX_RX_COMM); dp 1229 drivers/gpu/drm/bridge/analogix/analogix_dp_reg.c analogix_dp_init_aux(dp); dp 48 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = to_dp(plat_data); dp 49 drivers/gpu/drm/exynos/exynos_dp.c struct drm_encoder *encoder = &dp->encoder; dp 72 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = to_dp(plat_data); dp 76 drivers/gpu/drm/exynos/exynos_dp.c if (dp->plat_data.panel) dp 81 drivers/gpu/drm/exynos/exynos_dp.c DRM_DEV_ERROR(dp->dev, dp 86 drivers/gpu/drm/exynos/exynos_dp.c drm_display_mode_from_videomode(&dp->vm, mode); dp 101 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = to_dp(plat_data); dp 104 drivers/gpu/drm/exynos/exynos_dp.c dp->connector = connector; dp 107 drivers/gpu/drm/exynos/exynos_dp.c if (dp->ptn_bridge) { dp 108 drivers/gpu/drm/exynos/exynos_dp.c ret = drm_bridge_attach(&dp->encoder, dp->ptn_bridge, bridge); dp 110 drivers/gpu/drm/exynos/exynos_dp.c DRM_DEV_ERROR(dp->dev, dp 141 drivers/gpu/drm/exynos/exynos_dp.c static int exynos_dp_dt_parse_panel(struct exynos_dp_device *dp) dp 145 drivers/gpu/drm/exynos/exynos_dp.c ret = of_get_videomode(dp->dev->of_node, &dp->vm, OF_USE_NATIVE_MODE); dp 147 drivers/gpu/drm/exynos/exynos_dp.c DRM_DEV_ERROR(dp->dev, dp 156 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = dev_get_drvdata(dev); dp 157 drivers/gpu/drm/exynos/exynos_dp.c struct drm_encoder *encoder = &dp->encoder; dp 161 drivers/gpu/drm/exynos/exynos_dp.c dp->drm_dev = drm_dev; dp 163 drivers/gpu/drm/exynos/exynos_dp.c if (!dp->plat_data.panel && !dp->ptn_bridge) { dp 164 drivers/gpu/drm/exynos/exynos_dp.c ret = exynos_dp_dt_parse_panel(dp); dp 178 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.encoder = encoder; dp 180 drivers/gpu/drm/exynos/exynos_dp.c ret = analogix_dp_bind(dp->adp, dp->drm_dev); dp 182 
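The analogix_dp_transfer() entries above wait for AUX completion by polling ANALOGIX_DP_AUX_CH_CTL_2 and ANALOGIX_DP_INT_STA through readx_poll_timeout() from <linux/iopoll.h>, which repeatedly applies an accessor until a condition holds or a timeout expires. A sketch of that polling idiom; AUX_BUSY_BIT and the 25 us / 500 ms timings are illustrative, not the driver's values:

#include <linux/bits.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/types.h>

#define AUX_BUSY_BIT	BIT(0)	/* hypothetical "transaction in progress" flag */

static int example_wait_aux_idle(void __iomem *aux_ctl_reg)
{
	u32 reg;

	/*
	 * Re-read the register every 25 us until the busy bit clears,
	 * giving up after 500 ms; returns 0 on success, -ETIMEDOUT otherwise.
	 */
	return readx_poll_timeout(readl, aux_ctl_reg, reg,
				  !(reg & AUX_BUSY_BIT), 25, 500 * 1000);
}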
drivers/gpu/drm/exynos/exynos_dp.c dp->encoder.funcs->destroy(&dp->encoder); dp 190 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = dev_get_drvdata(dev); dp 192 drivers/gpu/drm/exynos/exynos_dp.c analogix_dp_unbind(dp->adp); dp 193 drivers/gpu/drm/exynos/exynos_dp.c dp->encoder.funcs->destroy(&dp->encoder); dp 205 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp; dp 210 drivers/gpu/drm/exynos/exynos_dp.c dp = devm_kzalloc(&pdev->dev, sizeof(struct exynos_dp_device), dp 212 drivers/gpu/drm/exynos/exynos_dp.c if (!dp) dp 215 drivers/gpu/drm/exynos/exynos_dp.c dp->dev = dev; dp 221 drivers/gpu/drm/exynos/exynos_dp.c platform_set_drvdata(pdev, dp); dp 226 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.panel = of_drm_find_panel(np); dp 229 drivers/gpu/drm/exynos/exynos_dp.c if (IS_ERR(dp->plat_data.panel)) dp 230 drivers/gpu/drm/exynos/exynos_dp.c return PTR_ERR(dp->plat_data.panel); dp 240 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.panel = panel; dp 241 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.dev_type = EXYNOS_DP; dp 242 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.power_on_start = exynos_dp_poweron; dp 243 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.power_off = exynos_dp_poweroff; dp 244 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.attach = exynos_dp_bridge_attach; dp 245 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.get_modes = exynos_dp_get_modes; dp 246 drivers/gpu/drm/exynos/exynos_dp.c dp->plat_data.skip_connector = !!bridge; dp 248 drivers/gpu/drm/exynos/exynos_dp.c dp->ptn_bridge = bridge; dp 251 drivers/gpu/drm/exynos/exynos_dp.c dp->adp = analogix_dp_probe(dev, &dp->plat_data); dp 252 drivers/gpu/drm/exynos/exynos_dp.c if (IS_ERR(dp->adp)) dp 253 drivers/gpu/drm/exynos/exynos_dp.c return PTR_ERR(dp->adp); dp 260 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = platform_get_drvdata(pdev); dp 263 drivers/gpu/drm/exynos/exynos_dp.c analogix_dp_remove(dp->adp); dp 271 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = dev_get_drvdata(dev); dp 273 drivers/gpu/drm/exynos/exynos_dp.c return analogix_dp_suspend(dp->adp); dp 278 drivers/gpu/drm/exynos/exynos_dp.c struct exynos_dp_device *dp = dev_get_drvdata(dev); dp 280 drivers/gpu/drm/exynos/exynos_dp.c return analogix_dp_resume(dp->adp); dp 2024 drivers/gpu/drm/i915/display/intel_bios.c u16 dp, hdmi; dp 2053 drivers/gpu/drm/i915/display/intel_bios.c if ((child->dvo_port == port_mapping[port].dp || dp 2104 drivers/gpu/drm/i915/display/intel_bios.c u16 dp, hdmi; dp 2124 drivers/gpu/drm/i915/display/intel_bios.c if (child->dvo_port == port_mapping[port].dp) dp 3355 drivers/gpu/drm/i915/display/intel_ddi.c struct intel_dp *intel_dp = &dig_port->dp; dp 4033 drivers/gpu/drm/i915/display/intel_ddi.c intel_dig_port->dp.output_reg = DDI_BUF_CTL(port); dp 4034 drivers/gpu/drm/i915/display/intel_ddi.c intel_dig_port->dp.prepare_link_retrain = dp 4193 drivers/gpu/drm/i915/display/intel_ddi.c !dig_port->dp.is_mst) dp 4344 drivers/gpu/drm/i915/display/intel_ddi.c intel_dig_port->dp.output_reg = INVALID_MMIO_REG; dp 1222 drivers/gpu/drm/i915/display/intel_display_types.h u32 (*get_aux_clock_divider)(struct intel_dp *dp, int index); dp 1227 drivers/gpu/drm/i915/display/intel_display_types.h u32 (*get_aux_send_ctl)(struct intel_dp *dp, int send_bytes, dp 1230 drivers/gpu/drm/i915/display/intel_display_types.h i915_reg_t (*aux_ch_ctl_reg)(struct intel_dp *dp); dp 1231 drivers/gpu/drm/i915/display/intel_display_types.h i915_reg_t (*aux_ch_data_reg)(struct intel_dp 
*dp, int index); dp 1257 drivers/gpu/drm/i915/display/intel_display_types.h struct intel_dp dp; dp 1400 drivers/gpu/drm/i915/display/intel_display_types.h return &enc_to_dig_port(encoder)->dp; dp 1426 drivers/gpu/drm/i915/display/intel_display_types.h return container_of(intel_dp, struct intel_digital_port, dp); dp 720 drivers/gpu/drm/i915/display/intel_dp.c #define with_pps_lock(dp, wf) \ dp 721 drivers/gpu/drm/i915/display/intel_dp.c for ((wf) = pps_lock(dp); (wf); (wf) = pps_unlock((dp), (wf))) dp 5542 drivers/gpu/drm/i915/display/intel_dp.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 5617 drivers/gpu/drm/i915/display/intel_dp.c dpcd_ret = drm_dp_dpcd_write(&intel_dig_port->dp.aux, DP_AUX_HDCP_AN, dp 5657 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, DP_AUX_HDCP_BKSV, bksv, dp 5675 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, DP_AUX_HDCP_BINFO, dp 5690 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, DP_AUX_HDCP_BCAPS, dp 5720 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, DP_AUX_HDCP_RI_PRIME, dp 5735 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, DP_AUX_HDCP_BSTATUS, dp 5755 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, dp 5777 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, dp 5801 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, DP_AUX_HDCP_BSTATUS, dp 5881 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, dp 5929 drivers/gpu/drm/i915/display/intel_dp.c struct intel_dp *dp = &intel_dig_port->dp; dp 5930 drivers/gpu/drm/i915/display/intel_dp.c struct intel_hdcp *hdcp = &dp->attached_connector->hdcp; dp 5981 drivers/gpu/drm/i915/display/intel_dp.c struct intel_dp *dp = &intel_dig_port->dp; dp 5982 drivers/gpu/drm/i915/display/intel_dp.c struct intel_hdcp *hdcp = &dp->attached_connector->hdcp; dp 6004 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_write(&intel_dig_port->dp.aux, dp 6024 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, dp 6077 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, offset, dp 6145 drivers/gpu/drm/i915/display/intel_dp.c ret = drm_dp_dpcd_read(&intel_dig_port->dp.aux, dp 6274 drivers/gpu/drm/i915/display/intel_dp.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 6652 drivers/gpu/drm/i915/display/intel_dp.c struct intel_dp *intel_dp = dev_priv->drrs.dp; dp 6750 drivers/gpu/drm/i915/display/intel_dp.c if (dev_priv->drrs.dp) { dp 6757 drivers/gpu/drm/i915/display/intel_dp.c dev_priv->drrs.dp = intel_dp; dp 6778 drivers/gpu/drm/i915/display/intel_dp.c if (!dev_priv->drrs.dp) { dp 6787 drivers/gpu/drm/i915/display/intel_dp.c dev_priv->drrs.dp = NULL; dp 6801 drivers/gpu/drm/i915/display/intel_dp.c intel_dp = dev_priv->drrs.dp; dp 6847 drivers/gpu/drm/i915/display/intel_dp.c if (!dev_priv->drrs.dp) { dp 6852 drivers/gpu/drm/i915/display/intel_dp.c crtc = dp_to_dig_port(dev_priv->drrs.dp)->base.base.crtc; dp 6861 drivers/gpu/drm/i915/display/intel_dp.c dev_priv->drrs.dp->attached_connector->panel.fixed_mode->vrefresh); dp 6890 drivers/gpu/drm/i915/display/intel_dp.c if (!dev_priv->drrs.dp) { dp 6895 drivers/gpu/drm/i915/display/intel_dp.c crtc = dp_to_dig_port(dev_priv->drrs.dp)->base.base.crtc; dp 6904 
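The intel_display_types.h entries above embed a struct intel_dp directly inside struct intel_digital_port (the dp 1257 entry) and recover the enclosing port from the embedded member with container_of() (the dp 1426 entry). A simplified sketch of the same idiom, using stand-in struct names rather than the i915 ones:

#include <linux/kernel.h>

struct example_dp {
	int output_reg;
};

struct example_digital_port {
	int port_id;
	struct example_dp dp;		/* embedded by value, not a pointer */
};

static inline struct example_digital_port *
example_dp_to_dig_port(struct example_dp *dp)
{
	/* Walk back from the embedded member to its containing structure. */
	return container_of(dp, struct example_digital_port, dp);
}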
drivers/gpu/drm/i915/display/intel_dp.c dev_priv->drrs.dp->attached_connector->panel.fixed_mode->vrefresh); dp 7140 drivers/gpu/drm/i915/display/intel_dp.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 7306 drivers/gpu/drm/i915/display/intel_dp.c intel_dig_port->dp.output_reg = output_reg; dp 47 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_mst->primary->dp; dp 96 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_mst->primary->dp; dp 194 drivers/gpu/drm/i915/display/intel_dp_mst.c mgr = &enc_to_mst(old_conn_state->best_encoder)->primary->dp.mst_mgr; dp 207 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 231 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 269 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 282 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 296 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 343 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 638 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 650 drivers/gpu/drm/i915/display/intel_dp_mst.c return intel_dig_port->dp.active_mst_links; dp 657 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 687 drivers/gpu/drm/i915/display/intel_dp_mst.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 3235 drivers/gpu/drm/i915/display/intel_hdmi.c intel_dig_port->dp.output_reg = INVALID_MMIO_REG; dp 55 drivers/gpu/drm/i915/display/intel_lspcon.c return &dig_port->dp; dp 75 drivers/gpu/drm/i915/display/intel_lspcon.c struct intel_dp *dp = lspcon_to_intel_dp(lspcon); dp 79 drivers/gpu/drm/i915/display/intel_lspcon.c if (drm_dp_read_desc(&dp->aux, &dp->desc, drm_dp_is_branch(dp->dpcd))) { dp 84 drivers/gpu/drm/i915/display/intel_lspcon.c ident = &dp->desc.ident; dp 555 drivers/gpu/drm/i915/display/intel_lspcon.c struct intel_dp *dp = &intel_dig_port->dp; dp 559 drivers/gpu/drm/i915/display/intel_lspcon.c struct drm_connector *connector = &dp->attached_connector->base; dp 574 drivers/gpu/drm/i915/display/intel_lspcon.c if (!intel_dp_read_dpcd(dp)) { dp 308 drivers/gpu/drm/i915/display/intel_psr.c WARN_ON(dev_priv->psr.dp); dp 309 drivers/gpu/drm/i915/display/intel_psr.c dev_priv->psr.dp = intel_dp; dp 604 drivers/gpu/drm/i915/display/intel_psr.c if (intel_dp != dev_priv->psr.dp) dp 729 drivers/gpu/drm/i915/display/intel_psr.c struct intel_dp *intel_dp = dev_priv->psr.dp; dp 765 drivers/gpu/drm/i915/display/intel_psr.c WARN_ON(dev_priv->drrs.dp); dp 901 drivers/gpu/drm/i915/display/intel_psr.c if (!CAN_PSR(dev_priv) || READ_ONCE(psr->dp) != intel_dp) dp 1084 drivers/gpu/drm/i915/display/intel_psr.c intel_psr_disable_locked(psr->dp); dp 1087 drivers/gpu/drm/i915/display/intel_psr.c drm_dp_dpcd_writeb(&psr->dp->aux, DP_SET_POWER, DP_SET_POWER_D0); dp 1120 drivers/gpu/drm/i915/display/intel_psr.c intel_psr_activate(dev_priv->psr.dp); dp 1267 drivers/gpu/drm/i915/display/intel_psr.c if (!psr->enabled || psr->dp != intel_dp) dp 1315 drivers/gpu/drm/i915/display/intel_psr.c ret = (dev_priv->psr.dp == intel_dp && dev_priv->psr.enabled); dp 421 drivers/gpu/drm/i915/display/intel_tc.c if (dig_port->dp.is_mst) dp 891 drivers/gpu/drm/i915/gvt/handlers.c #define _REG_HSW_DP_AUX_CH_CTL(dp) \ dp 892 
drivers/gpu/drm/i915/gvt/handlers.c ((dp) ? (_PCH_DPB_AUX_CH_CTL + ((dp)-1)*0x100) : 0x64010) dp 894 drivers/gpu/drm/i915/gvt/handlers.c #define _REG_SKL_DP_AUX_CH_CTL(dp) (0x64010 + (dp) * 0x100) dp 2175 drivers/gpu/drm/i915/i915_debugfs.c if (psr->dp) dp 2176 drivers/gpu/drm/i915/i915_debugfs.c seq_printf(m, " [0x%02x]", psr->dp->psr_dpcd[0]); dp 2532 drivers/gpu/drm/i915/i915_debugfs.c struct intel_dp *intel_dp = &intel_dig_port->dp; dp 3030 drivers/gpu/drm/i915/i915_debugfs.c if (!drrs->dp) { dp 3039 drivers/gpu/drm/i915/i915_debugfs.c panel = &drrs->dp->attached_connector->panel; dp 3110 drivers/gpu/drm/i915/i915_debugfs.c if (!intel_dig_port->dp.can_mst) dp 3115 drivers/gpu/drm/i915/i915_debugfs.c drm_dp_mst_dump_topology(m, &intel_dig_port->dp.mst_mgr); dp 461 drivers/gpu/drm/i915/i915_drv.h struct intel_dp *dp; dp 480 drivers/gpu/drm/i915/i915_drv.h struct intel_dp *dp; dp 364 drivers/gpu/drm/imx/ipuv3-crtc.c int dp = -EINVAL; dp 374 drivers/gpu/drm/imx/ipuv3-crtc.c if (pdata->dp >= 0) dp 375 drivers/gpu/drm/imx/ipuv3-crtc.c dp = IPU_DP_FLOW_SYNC_BG; dp 376 drivers/gpu/drm/imx/ipuv3-crtc.c ipu_crtc->plane[0] = ipu_plane_init(drm, ipu, pdata->dma[0], dp, 0, dp 396 drivers/gpu/drm/imx/ipuv3-crtc.c if (pdata->dp >= 0 && pdata->dma[1] > 0) { dp 147 drivers/gpu/drm/imx/ipuv3-plane.c if (!IS_ERR_OR_NULL(ipu_plane->dp)) dp 148 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_put(ipu_plane->dp); dp 188 drivers/gpu/drm/imx/ipuv3-plane.c ipu_plane->dp = ipu_dp_get(ipu_plane->ipu, ipu_plane->dp_flow); dp 189 drivers/gpu/drm/imx/ipuv3-plane.c if (IS_ERR(ipu_plane->dp)) { dp 190 drivers/gpu/drm/imx/ipuv3-plane.c ret = PTR_ERR(ipu_plane->dp); dp 220 drivers/gpu/drm/imx/ipuv3-plane.c if (ipu_plane->dp) dp 226 drivers/gpu/drm/imx/ipuv3-plane.c if (ipu_plane->dp) dp 227 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_enable_channel(ipu_plane->dp); dp 242 drivers/gpu/drm/imx/ipuv3-plane.c if (ipu_plane->dp && disable_dp_channel) dp 243 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_disable_channel(ipu_plane->dp, false); dp 248 drivers/gpu/drm/imx/ipuv3-plane.c if (ipu_plane->dp) dp 504 drivers/gpu/drm/imx/ipuv3-plane.c if (ipu_plane->dp) dp 505 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_disable_channel(ipu_plane->dp, true); dp 563 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_set_window_pos(ipu_plane->dp, dst->x1, dst->y1); dp 568 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_set_global_alpha(ipu_plane->dp, dp 572 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_set_global_alpha(ipu_plane->dp, true, 0, true); dp 577 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_set_global_alpha(ipu_plane->dp, dp 618 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_setup_channel(ipu_plane->dp, ics, IPUV3_COLORSPACE_RGB); dp 621 drivers/gpu/drm/imx/ipuv3-plane.c ipu_dp_setup_channel(ipu_plane->dp, ics, dp 826 drivers/gpu/drm/imx/ipuv3-plane.c int dma, int dp, unsigned int possible_crtcs, dp 835 drivers/gpu/drm/imx/ipuv3-plane.c dma, dp, possible_crtcs); dp 845 drivers/gpu/drm/imx/ipuv3-plane.c ipu_plane->dp_flow = dp; dp 862 drivers/gpu/drm/imx/ipuv3-plane.c if (dp == IPU_DP_FLOW_SYNC_BG || dp == IPU_DP_FLOW_SYNC_FG) dp 24 drivers/gpu/drm/imx/ipuv3-plane.h struct ipu_dp *dp; dp 33 drivers/gpu/drm/imx/ipuv3-plane.h int dma, int dp, unsigned int possible_crtcs, dp 83 drivers/gpu/drm/msm/edp/edp_aux.c u8 *dp; dp 90 drivers/gpu/drm/msm/edp/edp_aux.c dp = msg->buffer; dp 96 drivers/gpu/drm/msm/edp/edp_aux.c dp[i] = (u8)((data >> 8) & 0xff); dp 124 drivers/gpu/drm/nouveau/dispnv50/atom.h } dp; dp 809 drivers/gpu/drm/nouveau/dispnv50/disp.c asyh->dp.pbn = 
drm_dp_calc_pbn_mode(clock, asyh->or.bpc * 3); dp 813 drivers/gpu/drm/nouveau/dispnv50/disp.c asyh->dp.pbn); dp 817 drivers/gpu/drm/nouveau/dispnv50/disp.c asyh->dp.tu = slots; dp 859 drivers/gpu/drm/nouveau/dispnv50/disp.c r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, armh->dp.pbn, dp 860 drivers/gpu/drm/nouveau/dispnv50/disp.c armh->dp.tu); dp 1562 drivers/gpu/drm/nouveau/dispnv50/disp.c nv50_mstm_del(&nv_encoder->dp.mstm); dp 1631 drivers/gpu/drm/nouveau/dispnv50/disp.c &nv_encoder->dp.mstm); dp 1787 drivers/gpu/drm/nouveau/dispnv50/disp.c mstm = nouveau_encoder(encoder)->dp.mstm; dp 1801 drivers/gpu/drm/nouveau/dispnv50/disp.c mstm = nouveau_encoder(encoder)->dp.mstm; dp 2268 drivers/gpu/drm/nouveau/dispnv50/disp.c nv50_mstm_fini(nv_encoder->dp.mstm); dp 2286 drivers/gpu/drm/nouveau/dispnv50/disp.c nv50_mstm_init(nv_encoder->dp.mstm); dp 436 drivers/gpu/drm/nouveau/dispnv50/head.c asyh->dp = armh->dp; dp 38 drivers/gpu/drm/nouveau/include/nvkm/subdev/bios/conn.h u8 dp; dp 1064 drivers/gpu/drm/nouveau/nouveau_connector.c max_clock = nv_encoder->dp.link_nr; dp 1065 drivers/gpu/drm/nouveau/nouveau_connector.c max_clock *= nv_encoder->dp.link_bw; dp 1154 drivers/gpu/drm/nouveau/nouveau_connector.c nv50_mstm_service(nv_encoder->dp.mstm); dp 1183 drivers/gpu/drm/nouveau/nouveau_connector.c nv50_mstm_remove(nv_encoder->dp.mstm); dp 75 drivers/gpu/drm/nouveau/nouveau_dp.c nv_encoder->dp.link_bw = 27000 * dpcd[1]; dp 76 drivers/gpu/drm/nouveau/nouveau_dp.c nv_encoder->dp.link_nr = dpcd[2] & DP_MAX_LANE_COUNT_MASK; dp 79 drivers/gpu/drm/nouveau/nouveau_dp.c nv_encoder->dp.link_nr, nv_encoder->dp.link_bw, dpcd[0]); dp 84 drivers/gpu/drm/nouveau/nouveau_dp.c if (nv_encoder->dcb->dpconf.link_nr < nv_encoder->dp.link_nr) dp 85 drivers/gpu/drm/nouveau/nouveau_dp.c nv_encoder->dp.link_nr = nv_encoder->dcb->dpconf.link_nr; dp 86 drivers/gpu/drm/nouveau/nouveau_dp.c if (nv_encoder->dcb->dpconf.link_bw < nv_encoder->dp.link_bw) dp 87 drivers/gpu/drm/nouveau/nouveau_dp.c nv_encoder->dp.link_bw = nv_encoder->dcb->dpconf.link_bw; dp 90 drivers/gpu/drm/nouveau/nouveau_dp.c nv_encoder->dp.link_nr, nv_encoder->dp.link_bw); dp 94 drivers/gpu/drm/nouveau/nouveau_dp.c ret = nv50_mstm_detect(nv_encoder->dp.mstm, dpcd, nouveau_mst); dp 66 drivers/gpu/drm/nouveau/nouveau_encoder.h } dp; dp 91 drivers/gpu/drm/nouveau/nvkm/engine/disp/conn.c info->type, info->location, info->hpd, info->dp, dp 37 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp; dp 48 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = lt->dp; dp 51 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (dp->dpcd[DPCD_RC0E_AUX_RD_INTERVAL]) dp 52 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c mdelay(dp->dpcd[DPCD_RC0E_AUX_RD_INTERVAL] * 4); dp 56 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_rdaux(dp->aux, DPCD_LS02, lt->stat, 6); dp 61 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_rdaux(dp->aux, DPCD_LS0C, <->pc2stat, 1); dp 64 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_TRACE(&dp->outp, "status %6ph pc2 %02x", dp 67 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_TRACE(&dp->outp, "status %6ph", lt->stat); dp 76 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = lt->dp; dp 77 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_ior *ior = dp->outp.ior; dp 85 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c for (i = 0; i < ior->dp.nr; i++) { dp 107 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_TRACE(&dp->outp, "config lane %d %02x %02x", dp 110 
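The nouveau_dp.c entries above read the sink's lane count and per-lane rate from DPCD and clamp both against the board's DCB dpconf limits, while the nouveau_connector.c entries compute the usable bandwidth as the product of the two. A sketch of that negotiation, assuming simplified, illustrative types rather than nouveau's:

struct example_dp_caps {
	unsigned int link_nr;	/* number of lanes */
	unsigned int link_bw;	/* per-lane rate, in the driver's units */
};

static unsigned int example_negotiate_bw(struct example_dp_caps sink,
					 struct example_dp_caps source)
{
	/* Never exceed what the source hardware can drive. */
	if (sink.link_nr > source.link_nr)
		sink.link_nr = source.link_nr;
	if (sink.link_bw > source.link_bw)
		sink.link_bw = source.link_bw;

	/* Comparable to max_clock = dp.link_nr * dp.link_bw in nouveau_connector.c. */
	return sink.link_nr * sink.link_bw;
}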
drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c data = nvbios_dpout_match(bios, dp->outp.info.hasht, dp 111 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->outp.info.hashm, dp 122 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->func->dp.drive(ior, i, ocfg.pc, ocfg.dc, dp 126 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_wraux(dp->aux, DPCD_LC03(0), lt->conf, 4); dp 131 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_wraux(dp->aux, DPCD_LC0F, lt->pc2conf, 2); dp 142 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = lt->dp; dp 145 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_TRACE(&dp->outp, "training pattern %d", pattern); dp 146 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->outp.ior->func->dp.pattern(dp->outp.ior, pattern); dp 148 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_rdaux(dp->aux, DPCD_LC02, &sink_tp, 1); dp 151 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_wraux(dp->aux, DPCD_LC02, &sink_tp, 1); dp 160 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (lt->dp->dpcd[DPCD_RC02] & DPCD_RC02_TPS3_SUPPORTED) dp 172 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c for (i = 0; i < lt->dp->outp.ior->dp.nr && eq_done; i++) { dp 200 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c for (i = 0; i < lt->dp->outp.ior->dp.nr; i++) { dp 220 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_train_links(struct nvkm_dp *dp) dp 222 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_ior *ior = dp->outp.ior; dp 223 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_disp *disp = dp->outp.disp; dp 227 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c .dp = dp, dp 233 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "training %d x %d MB/s", dp 234 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->dp.nr, ior->dp.bw * 27); dp 238 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->dpcd[DPCD_RC02] &= ~DPCD_RC02_TPS3_SUPPORTED; dp 239 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c lt.pc2 = dp->dpcd[DPCD_RC02] & DPCD_RC02_TPS3_SUPPORTED; dp 242 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if ((lnkcmp = lt.dp->info.lnkcmp)) { dp 243 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (dp->version < 0x30) { dp 244 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c while ((ior->dp.bw * 2700) < nvbios_rd16(bios, lnkcmp)) dp 248 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c while (ior->dp.bw < nvbios_rd08(bios, lnkcmp)) dp 254 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.outp = &dp->outp.info; dp 260 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = ior->func->dp.links(ior, dp->aux); dp 263 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_ERR(&dp->outp, "train failed with %d", ret); dp 269 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->func->dp.power(ior, ior->dp.nr); dp 272 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c sink[0] = ior->dp.bw; dp 273 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c sink[1] = ior->dp.nr; dp 274 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (ior->dp.ef) dp 277 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_wraux(dp->aux, DPCD_LC00_LINK_BW_SET, sink, 2); dp 291 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_train_fini(struct nvkm_dp *dp) dp 294 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvbios_init(&dp->outp.disp->engine.subdev, dp->info.script[1], dp 295 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.outp = &dp->outp.info; dp 296 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.or = dp->outp.ior->id; dp 297 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.link = 
dp->outp.ior->asy.link; dp 302 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_train_init(struct nvkm_dp *dp) dp 305 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (dp->dpcd[DPCD_RC03] & DPCD_RC03_MAX_DOWNSPREAD) { dp 306 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvbios_init(&dp->outp.disp->engine.subdev, dp->info.script[2], dp 307 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.outp = &dp->outp.info; dp 308 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.or = dp->outp.ior->id; dp 309 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.link = dp->outp.ior->asy.link; dp 312 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvbios_init(&dp->outp.disp->engine.subdev, dp->info.script[3], dp 313 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.outp = &dp->outp.info; dp 314 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.or = dp->outp.ior->id; dp 315 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.link = dp->outp.ior->asy.link; dp 320 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvbios_init(&dp->outp.disp->engine.subdev, dp->info.script[0], dp 321 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.outp = &dp->outp.info; dp 322 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.or = dp->outp.ior->id; dp 323 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.link = dp->outp.ior->asy.link; dp 345 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_train(struct nvkm_dp *dp, u32 dataKBps) dp 347 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_ior *ior = dp->outp.ior; dp 348 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c const u8 sink_nr = dp->dpcd[DPCD_RC02] & DPCD_RC02_MAX_LANE_COUNT; dp 349 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c const u8 sink_bw = dp->dpcd[DPCD_RC01_MAX_LINK_RATE]; dp 350 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c const u8 outp_nr = dp->outp.info.dpconf.link_nr; dp 351 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c const u8 outp_bw = dp->outp.info.dpconf.link_bw; dp 385 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (!nvkm_rdaux(dp->aux, DPCD_SC00, &pwr, 1)) { dp 389 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_wraux(dp->aux, DPCD_SC00, &pwr, 1); dp 394 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "training (min: %d x %d MB/s)", dp 396 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_train_init(dp); dp 403 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_ERR(&dp->outp, "link rate unsupported by sink"); dp 405 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->dp.mst = dp->lt.mst; dp 406 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->dp.ef = dp->dpcd[DPCD_RC02] & DPCD_RC02_ENHANCED_FRAME_CAP; dp 407 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->dp.bw = cfg->bw; dp 408 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ior->dp.nr = cfg->nr; dp 411 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_dp_train_links(dp); dp 413 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_train_fini(dp); dp 415 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_ERR(&dp->outp, "training failed"); dp 417 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "training done"); dp 418 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c atomic_set(&dp->lt.done, 1); dp 425 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = nvkm_dp(outp); dp 428 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvbios_init(&ior->disp->engine.subdev, dp->info.script[4], dp 429 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c init.outp = &dp->outp.info; dp 438 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp 
*dp = nvkm_dp(outp); dp 441 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c atomic_set(&dp->lt.done, 0); dp 442 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->outp.ior->dp.nr = 0; dp 448 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = nvkm_dp(outp); dp 449 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_ior *ior = dp->outp.ior; dp 458 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c mutex_lock(&dp->mutex); dp 468 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c linkKBps = ior->dp.bw * 27000 * ior->dp.nr; dp 470 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "data %d KB/s link %d KB/s mst %d->%d", dp 471 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dataKBps, linkKBps, ior->dp.mst, dp->lt.mst); dp 472 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (linkKBps < dataKBps || ior->dp.mst != dp->lt.mst) { dp 473 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "link requirements changed"); dp 478 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_rdaux(dp->aux, DPCD_LS02, stat, 3); dp 480 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, dp 486 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c for (i = 0; i < ior->dp.nr; i++) { dp 491 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, dp 498 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "no inter-lane alignment"); dp 502 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (retrain || !atomic_read(&dp->lt.done)) dp 503 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_dp_train(dp, dataKBps); dp 504 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c mutex_unlock(&dp->mutex); dp 509 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_enable(struct nvkm_dp *dp, bool enable) dp 511 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_i2c_aux *aux = dp->aux; dp 514 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (!dp->present) { dp 515 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "aux power -> always"); dp 517 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->present = true; dp 520 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (!nvkm_rdaux(aux, DPCD_RC00_DPCD_REV, dp->dpcd, dp 521 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c sizeof(dp->dpcd))) dp 525 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (dp->present) { dp 526 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "aux power -> demand"); dp 528 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->present = false; dp 531 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c atomic_set(&dp->lt.done, 0); dp 539 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = container_of(notify, typeof(*dp), hpd); dp 540 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_conn *conn = dp->outp.conn; dp 541 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_disp *disp = dp->outp.disp; dp 544 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "HPD: %d", line->mask); dp 546 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (atomic_read(&dp->lt.done)) dp 547 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->outp.func->acquire(&dp->outp); dp 550 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_enable(dp, true); dp 565 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = nvkm_dp(outp); dp 566 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_notify_put(&dp->hpd); dp 567 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_enable(dp, false); dp 574 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp 
*dp = nvkm_dp(outp); dp 576 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_notify_put(&dp->outp.conn->hpd); dp 582 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (dp->outp.conn->info.type == DCB_CONNECTOR_eDP) { dp 599 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (!nvkm_dp_enable(dp, true) && power == 0) dp 602 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_dp_enable(dp, true); dp 605 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_notify_get(&dp->hpd); dp 611 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp = nvkm_dp(outp); dp 612 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c nvkm_notify_fini(&dp->hpd); dp 613 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c return dp; dp 628 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_i2c_aux *aux, struct nvkm_dp *dp) dp 637 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c ret = nvkm_outp_ctor(&nvkm_dp_func, disp, index, dcbE, &dp->outp); dp 641 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->aux = aux; dp 642 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (!dp->aux) { dp 643 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_ERR(&dp->outp, "no aux"); dp 648 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c data = nvbios_dpout_match(bios, dp->outp.info.hasht, dp 649 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->outp.info.hashm, &dp->version, dp 650 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c &hdr, &cnt, &len, &dp->info); dp 652 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_ERR(&dp->outp, "no bios dp data"); dp 656 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_DBG(&dp->outp, "bios dp %02x %02x %02x %02x", dp 657 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c dp->version, hdr, cnt, len); dp 664 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c .port = dp->aux->id, dp 668 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c &dp->hpd); dp 670 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c OUTP_ERR(&dp->outp, "error monitoring aux hpd: %d", ret); dp 674 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c mutex_init(&dp->mutex); dp 675 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c atomic_set(&dp->lt.done, 0); dp 685 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c struct nvkm_dp *dp; dp 692 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c if (!(dp = kzalloc(sizeof(*dp), GFP_KERNEL))) dp 694 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c *poutp = &dp->outp; dp 696 drivers/gpu/drm/nouveau/nvkm/engine/disp/dp.c return nvkm_dp_ctor(disp, index, dcbE, aux, dp); dp 43 drivers/gpu/drm/nouveau/nvkm/engine/disp/ior.h } dp; dp 86 drivers/gpu/drm/nouveau/nvkm/engine/disp/ior.h } dp; dp 329 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c const u32 linkKBps = ior->dp.bw * 27000; dp 341 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c h = h - (3 * ior->dp.ef) - (12 / ior->dp.nr); dp 347 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c v = v - ((36 / ior->dp.nr) + 3) - 1; dp 349 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c ior->func->dp.audio_sym(ior, head->id, h, v); dp 352 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c link_data_rate = (khz * head->asy.or.depth / 8) / ior->dp.nr; dp 358 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c for (TU = 64; ior->func->dp.activesym && TU >= 32; TU--) { dp 409 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c if (ior->func->dp.activesym) { dp 414 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c ior->func->dp.activesym(ior, head->id, bestTU, dp 427 drivers/gpu/drm/nouveau/nvkm/engine/disp/nv50.c ior->func->dp.watermark(ior, head->id, unk); dp 41 drivers/gpu/drm/nouveau/nvkm/engine/disp/piornv50.c int ret = 
nvkm_i2c_aux_lnk_ctl(aux, pior->dp.nr, pior->dp.bw, dp 42 drivers/gpu/drm/nouveau/nvkm/engine/disp/piornv50.c pior->dp.ef); dp 122 drivers/gpu/drm/nouveau/nvkm/engine/disp/piornv50.c .dp = { dp 156 drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c ior->func->dp.audio(ior, hidx, true); dp 161 drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c ior->func->dp.audio(ior, hidx, false); dp 231 drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c struct nvkm_dp *dp = nvkm_dp(outp); dp 240 drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c dp->lt.mst = !!args->v0.state; dp 258 drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c if (!outp->ior->func->dp.vcpi) dp 260 drivers/gpu/drm/nouveau/nvkm/engine/disp/rootnv50.c outp->ior->func->dp.vcpi(outp->ior, hidx, dp 62 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c const u32 shift = sor->func->dp.lanes[ln] * 8; dp 92 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c mask |= 1 << sor->func->dp.lanes[i]; dp 111 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c dpctrl |= ((1 << sor->dp.nr) - 1) << 16; dp 112 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c if (sor->dp.ef) dp 114 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c if (sor->dp.bw > 0x06) dp 267 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorg94.c .dp = { dp 75 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c const u32 shift = sor->func->dp.lanes[ln] * 8; dp 107 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c clksor |= sor->dp.bw << 18; dp 108 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c dpctrl |= ((1 << sor->dp.nr) - 1) << 16; dp 109 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c if (sor->dp.mst) dp 111 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c if (sor->dp.ef) dp 166 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgf119.c .dp = { dp 32 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgk104.c .dp = { dp 46 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm107.c .dp = { dp 31 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm200.c const u32 shift = sor->func->dp.lanes[ln] * 8; dp 104 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgm200.c .dp = { dp 48 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgt215.c .dp = { dp 93 drivers/gpu/drm/nouveau/nvkm/engine/disp/sorgv100.c .dp = { dp 32 drivers/gpu/drm/nouveau/nvkm/engine/disp/sormcp77.c .dp = { dp 32 drivers/gpu/drm/nouveau/nvkm/engine/disp/sormcp89.c .dp = { dp 46 drivers/gpu/drm/nouveau/nvkm/engine/disp/sortu102.c clksor |= sor->dp.bw << 18; dp 47 drivers/gpu/drm/nouveau/nvkm/engine/disp/sortu102.c dpctrl |= ((1 << sor->dp.nr) - 1) << 16; dp 48 drivers/gpu/drm/nouveau/nvkm/engine/disp/sortu102.c if (sor->dp.mst) dp 50 drivers/gpu/drm/nouveau/nvkm/engine/disp/sortu102.c if (sor->dp.ef) dp 77 drivers/gpu/drm/nouveau/nvkm/engine/disp/sortu102.c .dp = { dp 83 drivers/gpu/drm/nouveau/nvkm/subdev/bios/conn.c info->dp = (nvbios_rd08(bios, data + 0x01) & 0xc0) >> 6; dp 87 drivers/gpu/drm/nouveau/nvkm/subdev/bios/conn.c info->dp |= nvbios_rd08(bios, data + 0x02) & 0x0c; dp 106 drivers/gpu/drm/radeon/radeon_clocks.c struct device_node *dp = rdev->pdev->dev.of_node; dp 113 drivers/gpu/drm/radeon/radeon_clocks.c if (dp == NULL) dp 115 drivers/gpu/drm/radeon/radeon_clocks.c val = of_get_property(dp, "ATY,RefCLK", NULL); dp 154 drivers/gpu/drm/radeon/radeon_clocks.c val = of_get_property(dp, "ATY,SCLK", NULL); dp 161 drivers/gpu/drm/radeon/radeon_clocks.c val = of_get_property(dp, "ATY,MCLK", NULL); dp 75 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c static int rockchip_dp_pre_init(struct rockchip_dp_device *dp) dp 77 
drivers/gpu/drm/rockchip/analogix_dp-rockchip.c reset_control_assert(dp->rst); dp 79 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c reset_control_deassert(dp->rst); dp 86 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = to_dp(plat_data); dp 89 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = clk_prepare_enable(dp->pclk); dp 91 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c DRM_DEV_ERROR(dp->dev, "failed to enable pclk %d\n", ret); dp 95 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = rockchip_dp_pre_init(dp); dp 97 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c DRM_DEV_ERROR(dp->dev, "failed to dp pre init %d\n", ret); dp 98 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c clk_disable_unprepare(dp->pclk); dp 107 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = to_dp(plat_data); dp 109 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c clk_disable_unprepare(dp->pclk); dp 168 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = to_dp(encoder); dp 183 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = drm_of_encoder_active_endpoint_id(dp->dev->of_node, encoder); dp 188 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c val = dp->data->lcdsel_lit; dp 190 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c val = dp->data->lcdsel_big; dp 192 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c DRM_DEV_DEBUG(dp->dev, "vop %s output to dp\n", (ret) ? "LIT" : "BIG"); dp 194 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = clk_prepare_enable(dp->grfclk); dp 196 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c DRM_DEV_ERROR(dp->dev, "failed to enable grfclk %d\n", ret); dp 200 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = regmap_write(dp->grf, dp->data->lcdsel_grf_reg, val); dp 202 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c DRM_DEV_ERROR(dp->dev, "Could not write to GRF: %d\n", ret); dp 204 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c clk_disable_unprepare(dp->grfclk); dp 210 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = to_dp(encoder); dp 227 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c DRM_DEV_ERROR(dp->dev, "line flag irq timed out\n"); dp 265 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c static int rockchip_dp_of_probe(struct rockchip_dp_device *dp) dp 267 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct device *dev = dp->dev; dp 270 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->grf = syscon_regmap_lookup_by_phandle(np, "rockchip,grf"); dp 271 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (IS_ERR(dp->grf)) { dp 273 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return PTR_ERR(dp->grf); dp 276 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->grfclk = devm_clk_get(dev, "grf"); dp 277 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (PTR_ERR(dp->grfclk) == -ENOENT) { dp 278 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->grfclk = NULL; dp 279 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c } else if (PTR_ERR(dp->grfclk) == -EPROBE_DEFER) { dp 281 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c } else if (IS_ERR(dp->grfclk)) { dp 283 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return PTR_ERR(dp->grfclk); dp 286 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->pclk = devm_clk_get(dev, "pclk"); dp 287 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (IS_ERR(dp->pclk)) { dp 289 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return PTR_ERR(dp->pclk); dp 292 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->rst = 
devm_reset_control_get(dev, "dp"); dp 293 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (IS_ERR(dp->rst)) { dp 295 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return PTR_ERR(dp->rst); dp 301 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c static int rockchip_dp_drm_create_encoder(struct rockchip_dp_device *dp) dp 303 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct drm_encoder *encoder = &dp->encoder; dp 304 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct drm_device *drm_dev = dp->drm_dev; dp 305 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct device *dev = dp->dev; dp 327 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = dev_get_drvdata(dev); dp 331 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->drm_dev = drm_dev; dp 333 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = rockchip_dp_drm_create_encoder(dp); dp 339 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->plat_data.encoder = &dp->encoder; dp 341 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = analogix_dp_bind(dp->adp, drm_dev); dp 347 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->encoder.funcs->destroy(&dp->encoder); dp 354 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = dev_get_drvdata(dev); dp 356 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c analogix_dp_unbind(dp->adp); dp 357 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->encoder.funcs->destroy(&dp->encoder); dp 370 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp; dp 381 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp = devm_kzalloc(dev, sizeof(*dp), GFP_KERNEL); dp 382 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (!dp) dp 385 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->dev = dev; dp 386 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->adp = ERR_PTR(-ENODEV); dp 387 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->data = dp_data; dp 388 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->plat_data.panel = panel; dp 389 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->plat_data.dev_type = dp->data->chip_type; dp 390 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->plat_data.power_on_start = rockchip_dp_poweron_start; dp 391 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->plat_data.power_off = rockchip_dp_powerdown; dp 392 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->plat_data.get_modes = rockchip_dp_get_modes; dp 394 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c ret = rockchip_dp_of_probe(dp); dp 398 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c platform_set_drvdata(pdev, dp); dp 400 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c dp->adp = analogix_dp_probe(dev, &dp->plat_data); dp 401 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (IS_ERR(dp->adp)) dp 402 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return PTR_ERR(dp->adp); dp 409 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = platform_get_drvdata(pdev); dp 412 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c analogix_dp_remove(dp->adp); dp 420 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = dev_get_drvdata(dev); dp 422 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (IS_ERR(dp->adp)) dp 425 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return analogix_dp_suspend(dp->adp); dp 430 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c struct rockchip_dp_device *dp = dev_get_drvdata(dev); dp 432 drivers/gpu/drm/rockchip/analogix_dp-rockchip.c if (IS_ERR(dp->adp)) dp 435 
drivers/gpu/drm/rockchip/analogix_dp-rockchip.c return analogix_dp_resume(dp->adp); dp 61 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_grf_write(struct cdn_dp_device *dp, dp 66 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = clk_prepare_enable(dp->grf_clk); dp 68 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to prepare_enable grf clock\n"); dp 72 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = regmap_write(dp->grf, reg, val); dp 74 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Could not write to GRF: %d\n", ret); dp 78 drivers/gpu/drm/rockchip/cdn-dp-core.c clk_disable_unprepare(dp->grf_clk); dp 83 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_clk_enable(struct cdn_dp_device *dp) dp 88 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = clk_prepare_enable(dp->pclk); dp 90 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "cannot enable dp pclk %d\n", ret); dp 94 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = clk_prepare_enable(dp->core_clk); dp 96 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "cannot enable core_clk %d\n", ret); dp 100 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = pm_runtime_get_sync(dp->dev); dp 102 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "cannot get pm runtime %d\n", ret); dp 106 drivers/gpu/drm/rockchip/cdn-dp-core.c reset_control_assert(dp->core_rst); dp 107 drivers/gpu/drm/rockchip/cdn-dp-core.c reset_control_assert(dp->dptx_rst); dp 108 drivers/gpu/drm/rockchip/cdn-dp-core.c reset_control_assert(dp->apb_rst); dp 109 drivers/gpu/drm/rockchip/cdn-dp-core.c reset_control_deassert(dp->core_rst); dp 110 drivers/gpu/drm/rockchip/cdn-dp-core.c reset_control_deassert(dp->dptx_rst); dp 111 drivers/gpu/drm/rockchip/cdn-dp-core.c reset_control_deassert(dp->apb_rst); dp 113 drivers/gpu/drm/rockchip/cdn-dp-core.c rate = clk_get_rate(dp->core_clk); dp 115 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "get clk rate failed\n"); dp 120 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_set_fw_clk(dp, rate); dp 121 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_clock_reset(dp); dp 126 drivers/gpu/drm/rockchip/cdn-dp-core.c pm_runtime_put(dp->dev); dp 128 drivers/gpu/drm/rockchip/cdn-dp-core.c clk_disable_unprepare(dp->core_clk); dp 130 drivers/gpu/drm/rockchip/cdn-dp-core.c clk_disable_unprepare(dp->pclk); dp 135 drivers/gpu/drm/rockchip/cdn-dp-core.c static void cdn_dp_clk_disable(struct cdn_dp_device *dp) dp 137 drivers/gpu/drm/rockchip/cdn-dp-core.c pm_runtime_put_sync(dp->dev); dp 138 drivers/gpu/drm/rockchip/cdn-dp-core.c clk_disable_unprepare(dp->pclk); dp 139 drivers/gpu/drm/rockchip/cdn-dp-core.c clk_disable_unprepare(dp->core_clk); dp 164 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_get_sink_count(struct cdn_dp_device *dp, u8 *sink_count) dp 170 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_dpcd_read(dp, DP_SINK_COUNT, &value, 1); dp 178 drivers/gpu/drm/rockchip/cdn-dp-core.c static struct cdn_dp_port *cdn_dp_connected_port(struct cdn_dp_device *dp) dp 183 drivers/gpu/drm/rockchip/cdn-dp-core.c for (i = 0; i < dp->ports; i++) { dp 184 drivers/gpu/drm/rockchip/cdn-dp-core.c port = dp->port[i]; dp 192 drivers/gpu/drm/rockchip/cdn-dp-core.c static bool cdn_dp_check_sink_connection(struct cdn_dp_device *dp) dp 198 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->active_port < 0 || dp->active_port >= dp->ports) { dp 199 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "active_port is wrong!\n"); dp 203 drivers/gpu/drm/rockchip/cdn-dp-core.c port 
= dp->port[dp->active_port]; dp 215 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!cdn_dp_get_sink_count(dp, &sink_count)) dp 221 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Get sink capability timed out\n"); dp 228 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = connector_to_dp(connector); dp 231 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 232 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->connected) dp 234 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 256 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = connector_to_dp(connector); dp 260 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 261 drivers/gpu/drm/rockchip/cdn-dp-core.c edid = dp->edid; dp 263 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_DEBUG_KMS(dp->dev, "got edid: width[%d] x height[%d]\n", dp 266 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->sink_has_audio = drm_detect_monitor_audio(edid); dp 272 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 280 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = connector_to_dp(connector); dp 281 drivers/gpu/drm/rockchip/cdn-dp-core.c struct drm_display_info *display_info = &dp->connector.display_info; dp 286 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->connected) dp 303 drivers/gpu/drm/rockchip/cdn-dp-core.c source_max = dp->lanes; dp 304 drivers/gpu/drm/rockchip/cdn-dp-core.c sink_max = drm_dp_max_lane_count(dp->dpcd); dp 308 drivers/gpu/drm/rockchip/cdn-dp-core.c sink_max = drm_dp_max_link_rate(dp->dpcd); dp 317 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_DEBUG_KMS(dp->dev, dp 331 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_firmware_init(struct cdn_dp_device *dp) dp 335 drivers/gpu/drm/rockchip/cdn-dp-core.c const struct firmware *fw = dp->fw; dp 340 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "firmware is invalid\n"); dp 347 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_load_firmware(dp, iram_data, hdr->iram_size, dp 352 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_set_firmware_active(dp, true); dp 354 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "active ucpu failed: %d\n", ret); dp 358 drivers/gpu/drm/rockchip/cdn-dp-core.c return cdn_dp_event_config(dp); dp 361 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_get_sink_capability(struct cdn_dp_device *dp) dp 365 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!cdn_dp_check_sink_connection(dp)) dp 368 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_dpcd_read(dp, DP_DPCD_REV, dp->dpcd, dp 371 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to get caps %d\n", ret); dp 375 drivers/gpu/drm/rockchip/cdn-dp-core.c kfree(dp->edid); dp 376 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->edid = drm_do_get_edid(&dp->connector, dp 377 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_get_edid_block, dp); dp 381 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_enable_phy(struct cdn_dp_device *dp, struct cdn_dp_port *port) dp 389 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "phy power on failed: %d\n", dp 396 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_grf_write(dp, GRF_SOC_CON26, dp 399 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to write HPD_SEL %d\n", ret); dp 403 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_get_hpd_status(dp); dp 406 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "hpd does not exist\n"); dp 413 
drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "get property failed\n"); dp 418 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_set_host_cap(dp, port->lanes, property.intval); dp 420 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "set host capabilities failed: %d\n", dp 425 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->active_port = port->id; dp 430 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "phy power off failed: %d", ret); dp 435 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_grf_write(dp, GRF_SOC_CON26, dp 440 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_disable_phy(struct cdn_dp_device *dp, dp 448 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "phy power off failed: %d", ret); dp 455 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->active_port = -1; dp 459 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_disable(struct cdn_dp_device *dp) dp 463 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->active) dp 466 drivers/gpu/drm/rockchip/cdn-dp-core.c for (i = 0; i < dp->ports; i++) dp 467 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_disable_phy(dp, dp->port[i]); dp 469 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_grf_write(dp, GRF_SOC_CON26, dp 472 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to clear hpd sel %d\n", dp 477 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_set_firmware_active(dp, false); dp 478 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_clk_disable(dp); dp 479 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->active = false; dp 480 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->link.rate = 0; dp 481 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->link.num_lanes = 0; dp 482 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->connected) { dp 483 drivers/gpu/drm/rockchip/cdn-dp-core.c kfree(dp->edid); dp 484 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->edid = NULL; dp 490 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_enable(struct cdn_dp_device *dp) dp 495 drivers/gpu/drm/rockchip/cdn-dp-core.c port = cdn_dp_connected_port(dp); dp 497 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, dp 502 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->active) dp 505 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_clk_enable(dp); dp 509 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_firmware_init(dp); dp 511 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "firmware init failed: %d", ret); dp 516 drivers/gpu/drm/rockchip/cdn-dp-core.c for (i = port->id; i < dp->ports; i++) { dp 517 drivers/gpu/drm/rockchip/cdn-dp-core.c port = dp->port[i]; dp 520 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_enable_phy(dp, port); dp 524 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_get_sink_capability(dp); dp 526 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_disable_phy(dp, port); dp 528 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->active = true; dp 529 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->lanes = port->lanes; dp 536 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_clk_disable(dp); dp 544 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = encoder_to_dp(encoder); dp 545 drivers/gpu/drm/rockchip/cdn-dp-core.c struct drm_display_info *display_info = &dp->connector.display_info; dp 546 drivers/gpu/drm/rockchip/cdn-dp-core.c struct video_info *video = &dp->video_info; dp 564 drivers/gpu/drm/rockchip/cdn-dp-core.c memcpy(&dp->mode, adjusted, sizeof(*mode)); dp 567 drivers/gpu/drm/rockchip/cdn-dp-core.c static bool cdn_dp_check_link_status(struct cdn_dp_device 
*dp) dp 570 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_port *port = cdn_dp_connected_port(dp); dp 571 drivers/gpu/drm/rockchip/cdn-dp-core.c u8 sink_lanes = drm_dp_max_lane_count(dp->dpcd); dp 573 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!port || !dp->link.rate || !dp->link.num_lanes) dp 576 drivers/gpu/drm/rockchip/cdn-dp-core.c if (cdn_dp_dpcd_read(dp, DP_LANE0_1_STATUS, link_status, dp 588 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = encoder_to_dp(encoder); dp 591 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = drm_of_encoder_active_endpoint_id(dp->dev->of_node, encoder); dp 593 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Could not get vop id, %d", ret); dp 597 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_DEBUG_KMS(dp->dev, "vop %s output to cdn-dp\n", dp 604 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_grf_write(dp, GRF_SOC_CON9, val); dp 608 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 610 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_enable(dp); dp 612 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to enable encoder %d\n", dp 616 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!cdn_dp_check_link_status(dp)) { dp 617 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_train_link(dp); dp 619 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed link train %d\n", ret); dp 624 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_set_video_status(dp, CONTROL_VIDEO_IDLE); dp 626 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to idle video %d\n", ret); dp 630 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_config_video(dp); dp 632 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to config video %d\n", ret); dp 636 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_set_video_status(dp, CONTROL_VIDEO_VALID); dp 638 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to valid video %d\n", ret); dp 642 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 647 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = encoder_to_dp(encoder); dp 650 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 651 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->active) { dp 652 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_disable(dp); dp 654 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Failed to disable encoder %d\n", dp 658 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 669 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->connected && cdn_dp_connected_port(dp)) dp 670 drivers/gpu/drm/rockchip/cdn-dp-core.c schedule_work(&dp->event_work); dp 696 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_parse_dt(struct cdn_dp_device *dp) dp 698 drivers/gpu/drm/rockchip/cdn-dp-core.c struct device *dev = dp->dev; dp 703 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->grf = syscon_regmap_lookup_by_phandle(np, "rockchip,grf"); dp 704 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->grf)) { dp 706 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->grf); dp 710 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->regs = devm_ioremap_resource(dev, res); dp 711 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->regs)) { dp 713 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->regs); dp 716 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->core_clk = devm_clk_get(dev, "core-clk"); dp 717 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->core_clk)) { dp 719 
drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->core_clk); dp 722 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->pclk = devm_clk_get(dev, "pclk"); dp 723 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->pclk)) { dp 725 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->pclk); dp 728 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->spdif_clk = devm_clk_get(dev, "spdif"); dp 729 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->spdif_clk)) { dp 731 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->spdif_clk); dp 734 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->grf_clk = devm_clk_get(dev, "grf"); dp 735 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->grf_clk)) { dp 737 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->grf_clk); dp 740 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->spdif_rst = devm_reset_control_get(dev, "spdif"); dp 741 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->spdif_rst)) { dp 743 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->spdif_rst); dp 746 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->dptx_rst = devm_reset_control_get(dev, "dptx"); dp 747 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->dptx_rst)) { dp 749 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->dptx_rst); dp 752 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->core_rst = devm_reset_control_get(dev, "core"); dp 753 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->core_rst)) { dp 755 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->core_rst); dp 758 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->apb_rst = devm_reset_control_get(dev, "apb"); dp 759 drivers/gpu/drm/rockchip/cdn-dp-core.c if (IS_ERR(dp->apb_rst)) { dp 761 drivers/gpu/drm/rockchip/cdn-dp-core.c return PTR_ERR(dp->apb_rst); dp 771 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 779 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 780 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->active) { dp 798 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_audio_config(dp, &audio); dp 800 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->audio_info = audio; dp 803 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 809 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 812 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 813 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->active) dp 816 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_audio_stop(dp, &dp->audio_info); dp 818 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->audio_info.format = AFMT_UNUSED; dp 820 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 826 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 829 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 830 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->active) { dp 835 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_audio_mute(dp, enable); dp 838 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 845 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 847 drivers/gpu/drm/rockchip/cdn-dp-core.c memcpy(buf, dp->connector.eld, min(sizeof(dp->connector.eld), len)); dp 859 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_audio_codec_init(struct cdn_dp_device *dp, dp 869 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->audio_pdev = platform_device_register_data( dp 873 drivers/gpu/drm/rockchip/cdn-dp-core.c return 
PTR_ERR_OR_ZERO(dp->audio_pdev); dp 876 drivers/gpu/drm/rockchip/cdn-dp-core.c static int cdn_dp_request_firmware(struct cdn_dp_device *dp) dp 882 drivers/gpu/drm/rockchip/cdn-dp-core.c WARN_ON(!mutex_is_locked(&dp->lock)); dp 884 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->fw_loaded) dp 888 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 891 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = request_firmware(&dp->fw, CDN_DP_FIRMWARE, dp->dev); dp 897 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, dp 902 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->fw_loaded = true; dp 907 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Timed out trying to load firmware\n"); dp 910 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 916 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = container_of(work, struct cdn_dp_device, dp 918 drivers/gpu/drm/rockchip/cdn-dp-core.c struct drm_connector *connector = &dp->connector; dp 923 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 925 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->suspended) dp 928 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_request_firmware(dp); dp 932 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = true; dp 935 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!cdn_dp_connected_port(dp)) { dp 936 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_INFO(dp->dev, "Not connected. Disabling cdn\n"); dp 937 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = false; dp 940 drivers/gpu/drm/rockchip/cdn-dp-core.c } else if (!dp->active) { dp 941 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_INFO(dp->dev, "Connected, not enabled. Enabling cdn\n"); dp 942 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_enable(dp); dp 944 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Enable dp failed %d\n", ret); dp 945 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = false; dp 949 drivers/gpu/drm/rockchip/cdn-dp-core.c } else if (!cdn_dp_check_sink_connection(dp)) { dp 950 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_INFO(dp->dev, "Connected without sink. Assert hpd\n"); dp 951 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = false; dp 954 drivers/gpu/drm/rockchip/cdn-dp-core.c } else if (!cdn_dp_check_link_status(dp)) { dp 955 drivers/gpu/drm/rockchip/cdn-dp-core.c unsigned int rate = dp->link.rate; dp 956 drivers/gpu/drm/rockchip/cdn-dp-core.c unsigned int lanes = dp->link.num_lanes; dp 957 drivers/gpu/drm/rockchip/cdn-dp-core.c struct drm_display_mode *mode = &dp->mode; dp 959 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_INFO(dp->dev, "Connected with sink. 
Re-train link\n"); dp 960 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_train_link(dp); dp 962 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = false; dp 963 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, "Train link failed %d\n", ret); dp 969 drivers/gpu/drm/rockchip/cdn-dp-core.c (rate != dp->link.rate || lanes != dp->link.num_lanes)) { dp 970 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_config_video(dp); dp 972 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = false; dp 973 drivers/gpu/drm/rockchip/cdn-dp-core.c DRM_DEV_ERROR(dp->dev, dp 981 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 986 drivers/gpu/drm/rockchip/cdn-dp-core.c drm_kms_helper_hotplug_event(dp->drm_dev); dp 994 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = port->dp; dp 1001 drivers/gpu/drm/rockchip/cdn-dp-core.c schedule_work(&dp->event_work); dp 1008 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 1015 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_parse_dt(dp); dp 1019 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->drm_dev = drm_dev; dp 1020 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->connected = false; dp 1021 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->active = false; dp 1022 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->active_port = -1; dp 1023 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->fw_loaded = false; dp 1025 drivers/gpu/drm/rockchip/cdn-dp-core.c INIT_WORK(&dp->event_work, cdn_dp_pd_event_work); dp 1027 drivers/gpu/drm/rockchip/cdn-dp-core.c encoder = &dp->encoder; dp 1042 drivers/gpu/drm/rockchip/cdn-dp-core.c connector = &dp->connector; dp 1062 drivers/gpu/drm/rockchip/cdn-dp-core.c for (i = 0; i < dp->ports; i++) { dp 1063 drivers/gpu/drm/rockchip/cdn-dp-core.c port = dp->port[i]; dp 1066 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = devm_extcon_register_notifier(dp->dev, port->extcon, dp 1078 drivers/gpu/drm/rockchip/cdn-dp-core.c schedule_work(&dp->event_work); dp 1091 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 1092 drivers/gpu/drm/rockchip/cdn-dp-core.c struct drm_encoder *encoder = &dp->encoder; dp 1093 drivers/gpu/drm/rockchip/cdn-dp-core.c struct drm_connector *connector = &dp->connector; dp 1095 drivers/gpu/drm/rockchip/cdn-dp-core.c cancel_work_sync(&dp->event_work); dp 1101 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->fw_loaded) dp 1102 drivers/gpu/drm/rockchip/cdn-dp-core.c release_firmware(dp->fw); dp 1103 drivers/gpu/drm/rockchip/cdn-dp-core.c kfree(dp->edid); dp 1104 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->edid = NULL; dp 1114 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 1117 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 1118 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->active) dp 1119 drivers/gpu/drm/rockchip/cdn-dp-core.c ret = cdn_dp_disable(dp); dp 1120 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->suspended = true; dp 1121 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 1128 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = dev_get_drvdata(dev); dp 1130 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_lock(&dp->lock); dp 1131 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->suspended = false; dp 1132 drivers/gpu/drm/rockchip/cdn-dp-core.c if (dp->fw_loaded) dp 1133 drivers/gpu/drm/rockchip/cdn-dp-core.c schedule_work(&dp->event_work); dp 1134 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_unlock(&dp->lock); dp 1145 
drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp; dp 1150 drivers/gpu/drm/rockchip/cdn-dp-core.c dp = devm_kzalloc(dev, sizeof(*dp), GFP_KERNEL); dp 1151 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp) dp 1153 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->dev = dev; dp 1175 drivers/gpu/drm/rockchip/cdn-dp-core.c port->dp = dp; dp 1177 drivers/gpu/drm/rockchip/cdn-dp-core.c dp->port[dp->ports++] = port; dp 1180 drivers/gpu/drm/rockchip/cdn-dp-core.c if (!dp->ports) { dp 1185 drivers/gpu/drm/rockchip/cdn-dp-core.c mutex_init(&dp->lock); dp 1186 drivers/gpu/drm/rockchip/cdn-dp-core.c dev_set_drvdata(dev, dp); dp 1188 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_audio_codec_init(dp, dev); dp 1195 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = platform_get_drvdata(pdev); dp 1197 drivers/gpu/drm/rockchip/cdn-dp-core.c platform_device_unregister(dp->audio_pdev); dp 1198 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_suspend(dp->dev); dp 1206 drivers/gpu/drm/rockchip/cdn-dp-core.c struct cdn_dp_device *dp = platform_get_drvdata(pdev); dp 1208 drivers/gpu/drm/rockchip/cdn-dp-core.c cdn_dp_suspend(dp->dev); dp 55 drivers/gpu/drm/rockchip/cdn-dp-core.h struct cdn_dp_device *dp; dp 24 drivers/gpu/drm/rockchip/cdn-dp-reg.c void cdn_dp_set_fw_clk(struct cdn_dp_device *dp, unsigned long clk) dp 26 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(clk / 1000000, dp->regs + SW_CLK_H); dp 29 drivers/gpu/drm/rockchip/cdn-dp-reg.c void cdn_dp_clock_reset(struct cdn_dp_device *dp) dp 45 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SOURCE_DPTX_CAR); dp 48 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SOURCE_PHY_CAR); dp 54 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SOURCE_PKT_CAR); dp 62 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SOURCE_AIF_CAR); dp 68 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SOURCE_CIPHER_CAR); dp 72 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SOURCE_CRYPTO_CAR); dp 75 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + APB_INT_MASK); dp 78 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_mailbox_read(struct cdn_dp_device *dp) dp 82 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = readx_poll_timeout(readl, dp->regs + MAILBOX_EMPTY_ADDR, dp 88 drivers/gpu/drm/rockchip/cdn-dp-reg.c return readl(dp->regs + MAILBOX0_RD_DATA) & 0xff; dp 91 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdp_dp_mailbox_write(struct cdn_dp_device *dp, u8 val) dp 95 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = readx_poll_timeout(readl, dp->regs + MAILBOX_FULL_ADDR, dp 101 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + MAILBOX0_WR_DATA); dp 106 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_mailbox_validate_receive(struct cdn_dp_device *dp, dp 116 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read(dp); dp 132 drivers/gpu/drm/rockchip/cdn-dp-reg.c if (cdn_dp_mailbox_read(dp) < 0) dp 141 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_mailbox_read_receive(struct cdn_dp_device *dp, dp 148 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read(dp); dp 158 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_mailbox_send(struct cdn_dp_device *dp, u8 module_id, dp 170 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdp_dp_mailbox_write(dp, header[i]); dp 176 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdp_dp_mailbox_write(dp, message[i]); dp 184 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_reg_write(struct cdn_dp_device *dp, 
u16 addr, u32 val) dp 194 drivers/gpu/drm/rockchip/cdn-dp-reg.c return cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_WRITE_REGISTER, dp 198 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_reg_write_bit(struct cdn_dp_device *dp, u16 addr, dp 212 drivers/gpu/drm/rockchip/cdn-dp-reg.c return cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_WRITE_FIELD, dp 216 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_dpcd_read(struct cdn_dp_device *dp, u32 addr, u8 *data, u16 len) dp 226 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_READ_DPCD, dp 231 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_validate_receive(dp, MB_MODULE_ID_DP_TX, dp 237 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, reg, sizeof(reg)); dp 241 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, data, len); dp 247 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_dpcd_write(struct cdn_dp_device *dp, u32 addr, u8 value) dp 258 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_WRITE_DPCD, dp 263 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_validate_receive(dp, MB_MODULE_ID_DP_TX, dp 268 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, reg, sizeof(reg)); dp 277 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "dpcd write failed: %d\n", ret); dp 281 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_load_firmware(struct cdn_dp_device *dp, const u32 *i_mem, dp 289 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->regs + APB_CTRL); dp 292 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(*i_mem++, dp->regs + ADDR_IMEM + i); dp 295 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(*d_mem++, dp->regs + ADDR_DMEM + i); dp 298 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + APB_CTRL); dp 301 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = readx_poll_timeout(readl, dp->regs + KEEP_ALIVE, dp 304 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "failed to loaded the FW reg = %x\n", dp 309 drivers/gpu/drm/rockchip/cdn-dp-reg.c reg = readl(dp->regs + VER_L) & 0xff; dp 310 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->fw_version = reg; dp 311 drivers/gpu/drm/rockchip/cdn-dp-reg.c reg = readl(dp->regs + VER_H) & 0xff; dp 312 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->fw_version |= reg << 8; dp 313 drivers/gpu/drm/rockchip/cdn-dp-reg.c reg = readl(dp->regs + VER_LIB_L_ADDR) & 0xff; dp 314 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->fw_version |= reg << 16; dp 315 drivers/gpu/drm/rockchip/cdn-dp-reg.c reg = readl(dp->regs + VER_LIB_H_ADDR) & 0xff; dp 316 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->fw_version |= reg << 24; dp 318 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_DEBUG(dp->dev, "firmware version: %x\n", dp->fw_version); dp 323 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_set_firmware_active(struct cdn_dp_device *dp, bool enable) dp 335 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdp_dp_mailbox_write(dp, msg[i]); dp 342 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read(dp); dp 353 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "set firmware active failed\n"); dp 357 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_set_host_cap(struct cdn_dp_device *dp, u8 lanes, bool flip) dp 371 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, dp 377 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_AUX_SWAP_INVERSION_CONTROL, dp 382 
drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "set host cap failed: %d\n", ret); dp 386 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_event_config(struct cdn_dp_device *dp) dp 395 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_ENABLE_EVENT, dp 398 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "set event config failed: %d\n", ret); dp 403 drivers/gpu/drm/rockchip/cdn-dp-reg.c u32 cdn_dp_get_event(struct cdn_dp_device *dp) dp 405 drivers/gpu/drm/rockchip/cdn-dp-reg.c return readl(dp->regs + SW_EVENTS0); dp 408 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_get_hpd_status(struct cdn_dp_device *dp) dp 413 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_HPD_STATE, dp 418 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_validate_receive(dp, MB_MODULE_ID_DP_TX, dp 423 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, &status, sizeof(status)); dp 430 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "get hpd status failed: %d\n", ret); dp 437 drivers/gpu/drm/rockchip/cdn-dp-reg.c struct cdn_dp_device *dp = data; dp 445 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_GET_EDID, dp 450 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_validate_receive(dp, MB_MODULE_ID_DP_TX, dp 456 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, reg, sizeof(reg)); dp 460 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, edid, length); dp 469 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "get block[%d] edid failed: %d\n", block, dp 475 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_training_start(struct cdn_dp_device *dp) dp 484 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_TRAINING_CONTROL, dp 492 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, dp 497 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_validate_receive(dp, MB_MODULE_ID_DP_TX, dp 503 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, event, sizeof(event)); dp 514 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "training failed: %d\n", ret); dp 518 drivers/gpu/drm/rockchip/cdn-dp-reg.c static int cdn_dp_get_training_status(struct cdn_dp_device *dp) dp 523 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_READ_LINK_STAT, dp 528 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_validate_receive(dp, MB_MODULE_ID_DP_TX, dp 534 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_read_receive(dp, status, sizeof(status)); dp 538 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->link.rate = drm_dp_bw_code_to_link_rate(status[0]); dp 539 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->link.num_lanes = status[1]; dp 543 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "get training status failed: %d\n", ret); dp 547 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_train_link(struct cdn_dp_device *dp) dp 551 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_training_start(dp); dp 553 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "Failed to start training %d\n", ret); dp 557 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_get_training_status(dp); dp 559 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "Failed to get training stat %d\n", ret); dp 563 
drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_DEBUG_KMS(dp->dev, "rate:0x%x, lanes:%d\n", dp->link.rate, dp 564 drivers/gpu/drm/rockchip/cdn-dp-reg.c dp->link.num_lanes); dp 568 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_set_video_status(struct cdn_dp_device *dp, int active) dp 575 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_mailbox_send(dp, MB_MODULE_ID_DP_TX, DPTX_SET_VIDEO, dp 578 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "set video status failed: %d\n", ret); dp 630 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_config_video(struct cdn_dp_device *dp) dp 632 drivers/gpu/drm/rockchip/cdn-dp-reg.c struct video_info *video = &dp->video_info; dp 633 drivers/gpu/drm/rockchip/cdn-dp-reg.c struct drm_display_mode *mode = &dp->mode; dp 642 drivers/gpu/drm/rockchip/cdn-dp-reg.c link_rate = dp->link.rate / 1000; dp 644 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, BND_HSYNC2VSYNC, VIF_BYPASS_INTERLACE); dp 648 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, HSYNC2VSYNC_POL_CTRL, 0); dp 662 drivers/gpu/drm/rockchip/cdn-dp-reg.c do_div(symbol, dp->link.num_lanes * link_rate * 8); dp 666 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, dp 668 drivers/gpu/drm/rockchip/cdn-dp-reg.c mode->clock, dp->link.num_lanes, dp 677 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_FRAMER_TU, val); dp 683 drivers/gpu/drm/rockchip/cdn-dp-reg.c val /= (dp->link.num_lanes * link_rate); dp 686 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_VC_TABLE(15), val); dp 707 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_FRAMER_PXL_REPR, val); dp 713 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_FRAMER_SP, val); dp 719 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_FRONT_BACK_PORCH, val); dp 724 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_BYTE_COUNT, val); dp 729 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, MSA_HORIZONTAL_0, val); dp 735 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, MSA_HORIZONTAL_1, val); dp 741 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, MSA_VERTICAL_0, val); dp 747 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, MSA_VERTICAL_1, val); dp 752 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, MSA_MISC, val); dp 756 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, STREAM_CONFIG, 1); dp 762 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_HORIZONTAL, val); dp 768 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_VERTICAL_0, val); dp 773 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, DP_VERTICAL_1, val); dp 777 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write_bit(dp, DP_VB_ID, 2, 1, 0); dp 781 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "config video failed: %d\n", ret); dp 785 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_audio_stop(struct cdn_dp_device *dp, struct audio_info *audio) dp 789 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, AUDIO_PACK_CONTROL, 0); dp 791 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "audio stop failed: %d\n", ret); dp 795 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + SPDIF_CTRL_ADDR); dp 798 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + AUDIO_SRC_CNTL); dp 799 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + 
AUDIO_SRC_CNFG); dp 800 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(AUDIO_SW_RST, dp->regs + AUDIO_SRC_CNTL); dp 801 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + AUDIO_SRC_CNTL); dp 804 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + SMPL2PKT_CNTL); dp 805 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(AUDIO_SW_RST, dp->regs + SMPL2PKT_CNTL); dp 806 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + SMPL2PKT_CNTL); dp 809 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(AUDIO_SW_RST, dp->regs + FIFO_CNTL); dp 810 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0, dp->regs + FIFO_CNTL); dp 813 drivers/gpu/drm/rockchip/cdn-dp-reg.c clk_disable_unprepare(dp->spdif_clk); dp 818 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_audio_mute(struct cdn_dp_device *dp, bool enable) dp 822 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write_bit(dp, DP_VB_ID, 4, 1, enable); dp 824 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "audio mute failed: %d\n", ret); dp 829 drivers/gpu/drm/rockchip/cdn-dp-reg.c static void cdn_dp_audio_config_i2s(struct cdn_dp_device *dp, dp 836 drivers/gpu/drm/rockchip/cdn-dp-reg.c if (dp->link.num_lanes == 1) dp 846 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(0x0, dp->regs + SPDIF_CTRL_ADDR); dp 848 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(SYNC_WR_TO_CH_ZERO, dp->regs + FIFO_CNTL); dp 854 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SMPL2PKT_CNFG); dp 866 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + AUDIO_SRC_CNFG); dp 875 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + STTS_BIT_CH(i)); dp 909 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + COM_CH_STTS_BITS); dp 911 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(SMPL2PKT_EN, dp->regs + SMPL2PKT_CNTL); dp 912 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(I2S_DEC_START, dp->regs + AUDIO_SRC_CNTL); dp 915 drivers/gpu/drm/rockchip/cdn-dp-reg.c static void cdn_dp_audio_config_spdif(struct cdn_dp_device *dp) dp 919 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(SYNC_WR_TO_CH_ZERO, dp->regs + FIFO_CNTL); dp 922 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SMPL2PKT_CNFG); dp 923 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(SMPL2PKT_EN, dp->regs + SMPL2PKT_CNTL); dp 926 drivers/gpu/drm/rockchip/cdn-dp-reg.c writel(val, dp->regs + SPDIF_CTRL_ADDR); dp 928 drivers/gpu/drm/rockchip/cdn-dp-reg.c clk_prepare_enable(dp->spdif_clk); dp 929 drivers/gpu/drm/rockchip/cdn-dp-reg.c clk_set_rate(dp->spdif_clk, CDN_DP_SPDIF_CLK); dp 932 drivers/gpu/drm/rockchip/cdn-dp-reg.c int cdn_dp_audio_config(struct cdn_dp_device *dp, struct audio_info *audio) dp 938 drivers/gpu/drm/rockchip/cdn-dp-reg.c reset_control_assert(dp->spdif_rst); dp 939 drivers/gpu/drm/rockchip/cdn-dp-reg.c reset_control_deassert(dp->spdif_rst); dp 942 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, CM_LANE_CTRL, LANE_REF_CYC); dp 946 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, CM_CTRL, 0); dp 951 drivers/gpu/drm/rockchip/cdn-dp-reg.c cdn_dp_audio_config_i2s(dp, audio); dp 953 drivers/gpu/drm/rockchip/cdn-dp-reg.c cdn_dp_audio_config_spdif(dp); dp 955 drivers/gpu/drm/rockchip/cdn-dp-reg.c ret = cdn_dp_reg_write(dp, AUDIO_PACK_CONTROL, AUDIO_PACK_EN); dp 959 drivers/gpu/drm/rockchip/cdn-dp-reg.c DRM_DEV_ERROR(dp->dev, "audio config failed: %d\n", ret); dp 454 drivers/gpu/drm/rockchip/cdn-dp-reg.h void cdn_dp_clock_reset(struct cdn_dp_device *dp); dp 456 drivers/gpu/drm/rockchip/cdn-dp-reg.h void cdn_dp_set_fw_clk(struct 
cdn_dp_device *dp, unsigned long clk); dp 457 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_load_firmware(struct cdn_dp_device *dp, const u32 *i_mem, dp 459 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_set_firmware_active(struct cdn_dp_device *dp, bool enable); dp 460 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_set_host_cap(struct cdn_dp_device *dp, u8 lanes, bool flip); dp 461 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_event_config(struct cdn_dp_device *dp); dp 462 drivers/gpu/drm/rockchip/cdn-dp-reg.h u32 cdn_dp_get_event(struct cdn_dp_device *dp); dp 463 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_get_hpd_status(struct cdn_dp_device *dp); dp 464 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_dpcd_write(struct cdn_dp_device *dp, u32 addr, u8 value); dp 465 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_dpcd_read(struct cdn_dp_device *dp, u32 addr, u8 *data, u16 len); dp 466 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_get_edid_block(void *dp, u8 *edid, dp 468 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_train_link(struct cdn_dp_device *dp); dp 469 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_set_video_status(struct cdn_dp_device *dp, int active); dp 470 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_config_video(struct cdn_dp_device *dp); dp 471 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_audio_stop(struct cdn_dp_device *dp, struct audio_info *audio); dp 472 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_audio_mute(struct cdn_dp_device *dp, bool enable); dp 473 drivers/gpu/drm/rockchip/cdn-dp-reg.h int cdn_dp_audio_config(struct cdn_dp_device *dp, struct audio_info *audio); dp 1187 drivers/gpu/ipu-v3/ipu-common.c .dp = IPU_DP_FLOW_SYNC_BG, dp 1196 drivers/gpu/ipu-v3/ipu-common.c .dp = -EINVAL, dp 70 drivers/gpu/ipu-v3/ipu-dp.c static inline struct ipu_flow *to_flow(struct ipu_dp *dp) dp 72 drivers/gpu/ipu-v3/ipu-dp.c if (dp->foreground) dp 73 drivers/gpu/ipu-v3/ipu-dp.c return container_of(dp, struct ipu_flow, foreground); dp 75 drivers/gpu/ipu-v3/ipu-dp.c return container_of(dp, struct ipu_flow, background); dp 78 drivers/gpu/ipu-v3/ipu-dp.c int ipu_dp_set_global_alpha(struct ipu_dp *dp, bool enable, dp 81 drivers/gpu/ipu-v3/ipu-dp.c struct ipu_flow *flow = to_flow(dp); dp 114 drivers/gpu/ipu-v3/ipu-dp.c int ipu_dp_set_window_pos(struct ipu_dp *dp, u16 x_pos, u16 y_pos) dp 116 drivers/gpu/ipu-v3/ipu-dp.c struct ipu_flow *flow = to_flow(dp); dp 167 drivers/gpu/ipu-v3/ipu-dp.c int ipu_dp_setup_channel(struct ipu_dp *dp, dp 171 drivers/gpu/ipu-v3/ipu-dp.c struct ipu_flow *flow = to_flow(dp); dp 176 drivers/gpu/ipu-v3/ipu-dp.c dp->in_cs = in; dp 178 drivers/gpu/ipu-v3/ipu-dp.c if (!dp->foreground) dp 227 drivers/gpu/ipu-v3/ipu-dp.c int ipu_dp_enable_channel(struct ipu_dp *dp) dp 229 drivers/gpu/ipu-v3/ipu-dp.c struct ipu_flow *flow = to_flow(dp); dp 233 drivers/gpu/ipu-v3/ipu-dp.c if (!dp->foreground) dp 250 drivers/gpu/ipu-v3/ipu-dp.c void ipu_dp_disable_channel(struct ipu_dp *dp, bool sync) dp 252 drivers/gpu/ipu-v3/ipu-dp.c struct ipu_flow *flow = to_flow(dp); dp 256 drivers/gpu/ipu-v3/ipu-dp.c dp->in_cs = IPUV3_COLORSPACE_UNKNOWN; dp 258 drivers/gpu/ipu-v3/ipu-dp.c if (!dp->foreground) dp 300 drivers/gpu/ipu-v3/ipu-dp.c struct ipu_dp *dp; dp 306 drivers/gpu/ipu-v3/ipu-dp.c dp = &priv->flow[flow >> 1].foreground; dp 308 drivers/gpu/ipu-v3/ipu-dp.c dp = &priv->flow[flow >> 1].background; dp 310 drivers/gpu/ipu-v3/ipu-dp.c if (dp->in_use) dp 313 drivers/gpu/ipu-v3/ipu-dp.c dp->in_use = true; dp 315 drivers/gpu/ipu-v3/ipu-dp.c return dp; dp 
319 drivers/gpu/ipu-v3/ipu-dp.c void ipu_dp_put(struct ipu_dp *dp) dp 321 drivers/gpu/ipu-v3/ipu-dp.c dp->in_use = false; dp 291 drivers/hid/hid-led.c union delcom_packet dp = { .tx.major_cmd = 101, .tx.minor_cmd = 12 }; dp 293 drivers/hid/hid-led.c dp.tx.data_lsb = 1 << delcom_get_lednum(led); dp 294 drivers/hid/hid-led.c dp.tx.data_msb = 0; dp 296 drivers/hid/hid-led.c return hidled_send(led->rgb->ldev, dp.data); dp 301 drivers/hid/hid-led.c union delcom_packet dp = { .tx.major_cmd = 101, .tx.minor_cmd = 34 }; dp 303 drivers/hid/hid-led.c dp.tx.data_lsb = delcom_get_lednum(led); dp 304 drivers/hid/hid-led.c dp.tx.data_msb = led->cdev.brightness; dp 306 drivers/hid/hid-led.c return hidled_send(led->rgb->ldev, dp.data); dp 328 drivers/hid/hid-led.c union delcom_packet dp = { .rx.cmd = 104 }; dp 331 drivers/hid/hid-led.c ret = hidled_recv(ldev, dp.data); dp 338 drivers/hid/hid-led.c return le16_to_cpu(dp.fw.family_code) == 2 ? 0 : -ENODEV; dp 61 drivers/ide/au1xxx-ide.c au1x_ddma_desc_t *dp; dp 69 drivers/ide/au1xxx-ide.c dp = ctp->cur_ptr; dp 70 drivers/ide/au1xxx-ide.c while (dp->dscr_cmd0 & DSCR_CMD0_V) dp 72 drivers/ide/au1xxx-ide.c ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp); dp 79 drivers/ide/au1xxx-ide.c au1x_ddma_desc_t *dp; dp 87 drivers/ide/au1xxx-ide.c dp = ctp->cur_ptr; dp 88 drivers/ide/au1xxx-ide.c while (dp->dscr_cmd0 & DSCR_CMD0_V) dp 90 drivers/ide/au1xxx-ide.c ctp->cur_ptr = au1xxx_ddma_get_nextptr_virt(dp); dp 11177 drivers/infiniband/hw/hfi1/chip.c static int get_sc2vlnt(struct hfi1_devdata *dd, struct sc2vlnt *dp) dp 11187 drivers/infiniband/hw/hfi1/chip.c dp->vlnt[2 * i] = byte & 0xf; dp 11188 drivers/infiniband/hw/hfi1/chip.c dp->vlnt[(2 * i) + 1] = (byte & 0xf0) >> 4; dp 11195 drivers/infiniband/hw/hfi1/chip.c dp->vlnt[16 + (2 * i)] = byte & 0xf; dp 11196 drivers/infiniband/hw/hfi1/chip.c dp->vlnt[16 + (2 * i) + 1] = (byte & 0xf0) >> 4; dp 11212 drivers/infiniband/hw/hfi1/chip.c static void set_sc2vlnt(struct hfi1_devdata *dd, struct sc2vlnt *dp) dp 11216 drivers/infiniband/hw/hfi1/chip.c 0, dp->vlnt[0] & 0xf, dp 11217 drivers/infiniband/hw/hfi1/chip.c 1, dp->vlnt[1] & 0xf, dp 11218 drivers/infiniband/hw/hfi1/chip.c 2, dp->vlnt[2] & 0xf, dp 11219 drivers/infiniband/hw/hfi1/chip.c 3, dp->vlnt[3] & 0xf, dp 11220 drivers/infiniband/hw/hfi1/chip.c 4, dp->vlnt[4] & 0xf, dp 11221 drivers/infiniband/hw/hfi1/chip.c 5, dp->vlnt[5] & 0xf, dp 11222 drivers/infiniband/hw/hfi1/chip.c 6, dp->vlnt[6] & 0xf, dp 11223 drivers/infiniband/hw/hfi1/chip.c 7, dp->vlnt[7] & 0xf, dp 11224 drivers/infiniband/hw/hfi1/chip.c 8, dp->vlnt[8] & 0xf, dp 11225 drivers/infiniband/hw/hfi1/chip.c 9, dp->vlnt[9] & 0xf, dp 11226 drivers/infiniband/hw/hfi1/chip.c 10, dp->vlnt[10] & 0xf, dp 11227 drivers/infiniband/hw/hfi1/chip.c 11, dp->vlnt[11] & 0xf, dp 11228 drivers/infiniband/hw/hfi1/chip.c 12, dp->vlnt[12] & 0xf, dp 11229 drivers/infiniband/hw/hfi1/chip.c 13, dp->vlnt[13] & 0xf, dp 11230 drivers/infiniband/hw/hfi1/chip.c 14, dp->vlnt[14] & 0xf, dp 11231 drivers/infiniband/hw/hfi1/chip.c 15, dp->vlnt[15] & 0xf)); dp 11234 drivers/infiniband/hw/hfi1/chip.c 16, dp->vlnt[16] & 0xf, dp 11235 drivers/infiniband/hw/hfi1/chip.c 17, dp->vlnt[17] & 0xf, dp 11236 drivers/infiniband/hw/hfi1/chip.c 18, dp->vlnt[18] & 0xf, dp 11237 drivers/infiniband/hw/hfi1/chip.c 19, dp->vlnt[19] & 0xf, dp 11238 drivers/infiniband/hw/hfi1/chip.c 20, dp->vlnt[20] & 0xf, dp 11239 drivers/infiniband/hw/hfi1/chip.c 21, dp->vlnt[21] & 0xf, dp 11240 drivers/infiniband/hw/hfi1/chip.c 22, dp->vlnt[22] & 0xf, dp 11241 
drivers/infiniband/hw/hfi1/chip.c 23, dp->vlnt[23] & 0xf, dp 11242 drivers/infiniband/hw/hfi1/chip.c 24, dp->vlnt[24] & 0xf, dp 11243 drivers/infiniband/hw/hfi1/chip.c 25, dp->vlnt[25] & 0xf, dp 11244 drivers/infiniband/hw/hfi1/chip.c 26, dp->vlnt[26] & 0xf, dp 11245 drivers/infiniband/hw/hfi1/chip.c 27, dp->vlnt[27] & 0xf, dp 11246 drivers/infiniband/hw/hfi1/chip.c 28, dp->vlnt[28] & 0xf, dp 11247 drivers/infiniband/hw/hfi1/chip.c 29, dp->vlnt[29] & 0xf, dp 11248 drivers/infiniband/hw/hfi1/chip.c 30, dp->vlnt[30] & 0xf, dp 11249 drivers/infiniband/hw/hfi1/chip.c 31, dp->vlnt[31] & 0xf)); dp 553 drivers/infiniband/hw/qib/qib_diag.c struct qib_diag_xpkt dp; dp 559 drivers/infiniband/hw/qib/qib_diag.c if (count != sizeof(dp)) { dp 563 drivers/infiniband/hw/qib/qib_diag.c if (copy_from_user(&dp, data, sizeof(dp))) { dp 568 drivers/infiniband/hw/qib/qib_diag.c dd = qib_lookup(dp.unit); dp 579 drivers/infiniband/hw/qib/qib_diag.c if (dp.version != _DIAG_XPKT_VERS) { dp 581 drivers/infiniband/hw/qib/qib_diag.c dp.version); dp 586 drivers/infiniband/hw/qib/qib_diag.c if (dp.len & 3) { dp 590 drivers/infiniband/hw/qib/qib_diag.c if (!dp.port || dp.port > dd->num_pports) { dp 594 drivers/infiniband/hw/qib/qib_diag.c ppd = &dd->pport[dp.port - 1]; dp 603 drivers/infiniband/hw/qib/qib_diag.c if (dp.len > ppd->ibmaxlen - maxlen_reserve) { dp 608 drivers/infiniband/hw/qib/qib_diag.c plen = sizeof(u32) + dp.len; dp 617 drivers/infiniband/hw/qib/qib_diag.c u64_to_user_ptr(dp.data), dp 618 drivers/infiniband/hw/qib/qib_diag.c dp.len)) { dp 625 drivers/infiniband/hw/qib/qib_diag.c if (dp.pbc_wd == 0) dp 626 drivers/infiniband/hw/qib/qib_diag.c dp.pbc_wd = plen; dp 628 drivers/infiniband/hw/qib/qib_diag.c piobuf = dd->f_getsendbuf(ppd, dp.pbc_wd, &pbufn); dp 639 drivers/infiniband/hw/qib/qib_diag.c writeq(dp.pbc_wd, piobuf); dp 669 drivers/infiniband/hw/qib/qib_diag.c ret = sizeof(dp); dp 190 drivers/input/misc/sparcspkr.c struct device_node *dp; dp 201 drivers/input/misc/sparcspkr.c dp = of_find_node_by_path("/"); dp 203 drivers/input/misc/sparcspkr.c if (!dp) dp 207 drivers/input/misc/sparcspkr.c info->clock_freq = of_getintprop_default(dp, "clock-frequency", 0); dp 56 drivers/input/serio/i8042-sparcio.h struct device_node *dp; dp 58 drivers/input/serio/i8042-sparcio.h for_each_child_of_node(op->dev.of_node, dp) { dp 59 drivers/input/serio/i8042-sparcio.h if (of_node_name_eq(dp, OBP_PS2KBD_NAME1) || dp 60 drivers/input/serio/i8042-sparcio.h of_node_name_eq(dp, OBP_PS2KBD_NAME2)) { dp 61 drivers/input/serio/i8042-sparcio.h struct platform_device *kbd = of_find_device_by_node(dp); dp 69 drivers/input/serio/i8042-sparcio.h } else if (of_node_name_eq(dp, OBP_PS2MS_NAME1) || dp 70 drivers/input/serio/i8042-sparcio.h of_node_name_eq(dp, OBP_PS2MS_NAME2)) { dp 71 drivers/input/serio/i8042-sparcio.h struct platform_device *ms = of_find_device_by_node(dp); dp 34 drivers/isdn/hardware/mISDN/iohelper.h static void ReadFiFo##name##_IO(void *p, u8 off, u8 *dp, int size) { \ dp 36 drivers/isdn/hardware/mISDN/iohelper.h insb(hw->ap.port + off, dp, size); \ dp 38 drivers/isdn/hardware/mISDN/iohelper.h static void WriteFiFo##name##_IO(void *p, u8 off, u8 *dp, int size) { \ dp 40 drivers/isdn/hardware/mISDN/iohelper.h outsb(hw->ap.port + off, dp, size); \ dp 54 drivers/isdn/hardware/mISDN/iohelper.h static void ReadFiFo##name##_IND(void *p, u8 off, u8 *dp, int size) { \ dp 57 drivers/isdn/hardware/mISDN/iohelper.h insb(hw->ap.port, dp, size); \ dp 59 drivers/isdn/hardware/mISDN/iohelper.h static void 
WriteFiFo##name##_IND(void *p, u8 off, u8 *dp, int size) { \ dp 62 drivers/isdn/hardware/mISDN/iohelper.h outsb(hw->ap.port, dp, size); \ dp 74 drivers/isdn/hardware/mISDN/iohelper.h static void ReadFiFo##name##_MIO(void *p, u8 off, u8 *dp, int size) { \ dp 77 drivers/isdn/hardware/mISDN/iohelper.h *dp++ = readb(((typ *)hw->adr) + off); \ dp 79 drivers/isdn/hardware/mISDN/iohelper.h static void WriteFiFo##name##_MIO(void *p, u8 off, u8 *dp, int size) { \ dp 82 drivers/isdn/hardware/mISDN/iohelper.h writeb(*dp++, ((typ *)hw->adr) + off); \ dp 45 drivers/isdn/mISDN/clock.c mISDN_init_clock(u_int *dp) dp 47 drivers/isdn/mISDN/clock.c debug = dp; dp 651 drivers/isdn/mISDN/stack.c mISDN_initstack(u_int *dp) dp 653 drivers/isdn/mISDN/stack.c debug = dp; dp 481 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 484 drivers/isdn/mISDN/tei.c ri = ((unsigned int) *dp++ << 8); dp 485 drivers/isdn/mISDN/tei.c ri += *dp++; dp 486 drivers/isdn/mISDN/tei.c dp++; dp 487 drivers/isdn/mISDN/tei.c tei = *dp >> 1; dp 510 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 513 drivers/isdn/mISDN/tei.c ri = ((unsigned int) *dp++ << 8); dp 514 drivers/isdn/mISDN/tei.c ri += *dp++; dp 515 drivers/isdn/mISDN/tei.c dp++; dp 516 drivers/isdn/mISDN/tei.c tei = *dp >> 1; dp 534 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 537 drivers/isdn/mISDN/tei.c ri = ((unsigned int) *dp++ << 8); dp 538 drivers/isdn/mISDN/tei.c ri += *dp++; dp 539 drivers/isdn/mISDN/tei.c dp++; dp 540 drivers/isdn/mISDN/tei.c tei = *dp >> 1; dp 550 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 553 drivers/isdn/mISDN/tei.c tei = *(dp + 3) >> 1; dp 568 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 571 drivers/isdn/mISDN/tei.c tei = *(dp + 3) >> 1; dp 663 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 670 drivers/isdn/mISDN/tei.c tm->ri = ((unsigned int) *dp++ << 8); dp 671 drivers/isdn/mISDN/tei.c tm->ri += *dp++; dp 674 drivers/isdn/mISDN/tei.c "net assign request ri %d teim %d", tm->ri, *dp); dp 698 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 701 drivers/isdn/mISDN/tei.c tei = dp[3] >> 1; dp 712 drivers/isdn/mISDN/tei.c u_char *dp = arg; dp 715 drivers/isdn/mISDN/tei.c tei = dp[3] >> 1; dp 761 drivers/isdn/mISDN/tei.c tei_ph_data_ind(struct teimgr *tm, u_int mt, u_char *dp, int len) dp 768 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&tm->tei_m, EV_ASSIGN, dp); dp 770 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&tm->tei_m, EV_DENIED, dp); dp 772 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&tm->tei_m, EV_CHKREQ, dp); dp 774 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&tm->tei_m, EV_REMOVE, dp); dp 776 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&tm->tei_m, EV_VERIFY, dp); dp 778 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&tm->tei_m, EV_CHKRESP, dp); dp 848 drivers/isdn/mISDN/tei.c new_tei_req(struct manager *mgr, u_char *dp) dp 853 drivers/isdn/mISDN/tei.c ri = dp[0] << 8; dp 854 drivers/isdn/mISDN/tei.c ri += dp[1]; dp 857 drivers/isdn/mISDN/tei.c if (!(dp[3] & 1)) /* Extension bit != 1 */ dp 859 drivers/isdn/mISDN/tei.c if (dp[3] != 0xff) dp 860 drivers/isdn/mISDN/tei.c tei = dp[3] >> 1; /* 3GPP TS 08.56 6.1.11.2 */ dp 871 drivers/isdn/mISDN/tei.c mISDN_FsmEvent(&l2->tm->tei_m, EV_ASSIGN_REQ, dp); dp 1298 drivers/md/dm-integrity.c unsigned char *data, *dp; dp 1312 drivers/md/dm-integrity.c dp = data + *metadata_offset; dp 1314 drivers/md/dm-integrity.c memcpy(tag, dp, to_copy); dp 1316 drivers/md/dm-integrity.c memcpy(dp, tag, to_copy); dp 1320 drivers/md/dm-integrity.c if (unlikely(memcmp(dp, tag, to_copy))) { dp 1324 drivers/md/dm-integrity.c if (dp[i] != tag[i]) dp 162 
drivers/md/dm-io.c void (*get_page)(struct dpages *dp, dp 164 drivers/md/dm-io.c void (*next_page)(struct dpages *dp); dp 179 drivers/md/dm-io.c static void list_get_page(struct dpages *dp, dp 182 drivers/md/dm-io.c unsigned o = dp->context_u; dp 183 drivers/md/dm-io.c struct page_list *pl = (struct page_list *) dp->context_ptr; dp 190 drivers/md/dm-io.c static void list_next_page(struct dpages *dp) dp 192 drivers/md/dm-io.c struct page_list *pl = (struct page_list *) dp->context_ptr; dp 193 drivers/md/dm-io.c dp->context_ptr = pl->next; dp 194 drivers/md/dm-io.c dp->context_u = 0; dp 197 drivers/md/dm-io.c static void list_dp_init(struct dpages *dp, struct page_list *pl, unsigned offset) dp 199 drivers/md/dm-io.c dp->get_page = list_get_page; dp 200 drivers/md/dm-io.c dp->next_page = list_next_page; dp 201 drivers/md/dm-io.c dp->context_u = offset; dp 202 drivers/md/dm-io.c dp->context_ptr = pl; dp 208 drivers/md/dm-io.c static void bio_get_page(struct dpages *dp, struct page **p, dp 211 drivers/md/dm-io.c struct bio_vec bvec = bvec_iter_bvec((struct bio_vec *)dp->context_ptr, dp 212 drivers/md/dm-io.c dp->context_bi); dp 219 drivers/md/dm-io.c dp->context_bi.bi_sector = (sector_t)bvec.bv_len; dp 222 drivers/md/dm-io.c static void bio_next_page(struct dpages *dp) dp 224 drivers/md/dm-io.c unsigned int len = (unsigned int)dp->context_bi.bi_sector; dp 226 drivers/md/dm-io.c bvec_iter_advance((struct bio_vec *)dp->context_ptr, dp 227 drivers/md/dm-io.c &dp->context_bi, len); dp 230 drivers/md/dm-io.c static void bio_dp_init(struct dpages *dp, struct bio *bio) dp 232 drivers/md/dm-io.c dp->get_page = bio_get_page; dp 233 drivers/md/dm-io.c dp->next_page = bio_next_page; dp 239 drivers/md/dm-io.c dp->context_ptr = bio->bi_io_vec; dp 240 drivers/md/dm-io.c dp->context_bi = bio->bi_iter; dp 246 drivers/md/dm-io.c static void vm_get_page(struct dpages *dp, dp 249 drivers/md/dm-io.c *p = vmalloc_to_page(dp->context_ptr); dp 250 drivers/md/dm-io.c *offset = dp->context_u; dp 251 drivers/md/dm-io.c *len = PAGE_SIZE - dp->context_u; dp 254 drivers/md/dm-io.c static void vm_next_page(struct dpages *dp) dp 256 drivers/md/dm-io.c dp->context_ptr += PAGE_SIZE - dp->context_u; dp 257 drivers/md/dm-io.c dp->context_u = 0; dp 260 drivers/md/dm-io.c static void vm_dp_init(struct dpages *dp, void *data) dp 262 drivers/md/dm-io.c dp->get_page = vm_get_page; dp 263 drivers/md/dm-io.c dp->next_page = vm_next_page; dp 264 drivers/md/dm-io.c dp->context_u = offset_in_page(data); dp 265 drivers/md/dm-io.c dp->context_ptr = data; dp 271 drivers/md/dm-io.c static void km_get_page(struct dpages *dp, struct page **p, unsigned long *len, dp 274 drivers/md/dm-io.c *p = virt_to_page(dp->context_ptr); dp 275 drivers/md/dm-io.c *offset = dp->context_u; dp 276 drivers/md/dm-io.c *len = PAGE_SIZE - dp->context_u; dp 279 drivers/md/dm-io.c static void km_next_page(struct dpages *dp) dp 281 drivers/md/dm-io.c dp->context_ptr += PAGE_SIZE - dp->context_u; dp 282 drivers/md/dm-io.c dp->context_u = 0; dp 285 drivers/md/dm-io.c static void km_dp_init(struct dpages *dp, void *data) dp 287 drivers/md/dm-io.c dp->get_page = km_get_page; dp 288 drivers/md/dm-io.c dp->next_page = km_next_page; dp 289 drivers/md/dm-io.c dp->context_u = offset_in_page(data); dp 290 drivers/md/dm-io.c dp->context_ptr = data; dp 297 drivers/md/dm-io.c struct dm_io_region *where, struct dpages *dp, dp 363 drivers/md/dm-io.c dp->get_page(dp, &page, &len, &offset); dp 370 drivers/md/dm-io.c dp->next_page(dp); dp 375 drivers/md/dm-io.c dp->get_page(dp, &page, 
&len, &offset); dp 382 drivers/md/dm-io.c dp->next_page(dp); dp 391 drivers/md/dm-io.c struct dm_io_region *where, struct dpages *dp, dp 395 drivers/md/dm-io.c struct dpages old_pages = *dp; dp 407 drivers/md/dm-io.c *dp = old_pages; dp 409 drivers/md/dm-io.c do_region(op, op_flags, i, where + i, dp, io); dp 434 drivers/md/dm-io.c struct dpages *dp, unsigned long *error_bits) dp 453 drivers/md/dm-io.c io->vma_invalidate_address = dp->vma_invalidate_address; dp 454 drivers/md/dm-io.c io->vma_invalidate_size = dp->vma_invalidate_size; dp 456 drivers/md/dm-io.c dispatch_io(op, op_flags, num_regions, where, dp, io, 1); dp 468 drivers/md/dm-io.c struct dpages *dp, io_notify_fn fn, void *context) dp 485 drivers/md/dm-io.c io->vma_invalidate_address = dp->vma_invalidate_address; dp 486 drivers/md/dm-io.c io->vma_invalidate_size = dp->vma_invalidate_size; dp 488 drivers/md/dm-io.c dispatch_io(op, op_flags, num_regions, where, dp, io, 0); dp 492 drivers/md/dm-io.c static int dp_init(struct dm_io_request *io_req, struct dpages *dp, dp 497 drivers/md/dm-io.c dp->vma_invalidate_address = NULL; dp 498 drivers/md/dm-io.c dp->vma_invalidate_size = 0; dp 502 drivers/md/dm-io.c list_dp_init(dp, io_req->mem.ptr.pl, io_req->mem.offset); dp 506 drivers/md/dm-io.c bio_dp_init(dp, io_req->mem.ptr.bio); dp 512 drivers/md/dm-io.c dp->vma_invalidate_address = io_req->mem.ptr.vma; dp 513 drivers/md/dm-io.c dp->vma_invalidate_size = size; dp 515 drivers/md/dm-io.c vm_dp_init(dp, io_req->mem.ptr.vma); dp 519 drivers/md/dm-io.c km_dp_init(dp, io_req->mem.ptr.addr); dp 541 drivers/md/dm-io.c struct dpages dp; dp 543 drivers/md/dm-io.c r = dp_init(io_req, &dp, (unsigned long)where->count << SECTOR_SHIFT); dp 549 drivers/md/dm-io.c io_req->bi_op, io_req->bi_op_flags, &dp, dp 553 drivers/md/dm-io.c io_req->bi_op_flags, &dp, io_req->notify.fn, dp 4970 drivers/media/usb/pvrusb2/pvrusb2-hdw.c int pvr2_hdw_gpio_get_dir(struct pvr2_hdw *hdw,u32 *dp) dp 4972 drivers/media/usb/pvrusb2/pvrusb2-hdw.c return pvr2_read_register(hdw,PVR2_GPIO_DIR,dp); dp 4976 drivers/media/usb/pvrusb2/pvrusb2-hdw.c int pvr2_hdw_gpio_get_out(struct pvr2_hdw *hdw,u32 *dp) dp 4978 drivers/media/usb/pvrusb2/pvrusb2-hdw.c return pvr2_read_register(hdw,PVR2_GPIO_OUT,dp); dp 4982 drivers/media/usb/pvrusb2/pvrusb2-hdw.c int pvr2_hdw_gpio_get_in(struct pvr2_hdw *hdw,u32 *dp) dp 4984 drivers/media/usb/pvrusb2/pvrusb2-hdw.c return pvr2_read_register(hdw,PVR2_GPIO_IN,dp); dp 135 drivers/misc/mic/bus/scif_bus.c struct mic_mw *mmio, struct mic_mw *aper, void *dp, dp 158 drivers/misc/mic/bus/scif_bus.c sdev->dp = dp; dp 52 drivers/misc/mic/bus/scif_bus.h void *dp; dp 111 drivers/misc/mic/bus/scif_bus.h void *dp, void __iomem *rdp, dp 40 drivers/misc/mic/card/mic_device.c mdrv->dp = mic_card_map(mdev, dp_dma_addr, MIC_DP_SIZE); dp 41 drivers/misc/mic/card/mic_device.c if (!mdrv->dp) { dp 45 drivers/misc/mic/card/mic_device.c bootparam = mdrv->dp; dp 57 drivers/misc/mic/card/mic_device.c mic_card_unmap(&g_drv->mdev, g_drv->dp); dp 275 drivers/misc/mic/card/mic_device.c return mdrv->dp; dp 376 drivers/misc/mic/card/mic_device.c bootparam = mdrv->dp; dp 381 drivers/misc/mic/card/mic_device.c NULL, mdrv->dp, mdrv->dma_ch, dp 74 drivers/misc/mic/card/mic_device.h void __iomem *dp; dp 88 drivers/misc/mic/host/mic_boot.c return mdev->dp; dp 371 drivers/misc/mic/host/mic_boot.c struct mic_bootparam *bootparam = mdev->dp; dp 476 drivers/misc/mic/host/mic_boot.c &mdev->aper, mdev->dp, NULL, dp 79 drivers/misc/mic/host/mic_device.h void *dp; dp 51 
drivers/misc/mic/host/mic_main.c mdev->dp = kzalloc(MIC_DP_SIZE, GFP_KERNEL); dp 52 drivers/misc/mic/host/mic_main.c if (!mdev->dp) dp 56 drivers/misc/mic/host/mic_main.c mdev->dp, MIC_DP_SIZE); dp 58 drivers/misc/mic/host/mic_main.c kfree(mdev->dp); dp 72 drivers/misc/mic/host/mic_main.c kfree(mdev->dp); dp 84 drivers/misc/mic/scif/scif_main.c struct mic_bootparam *bp = sdev->dp; dp 171 drivers/misc/mic/scif/scif_main.c struct mic_bootparam *bp = sdev->dp; dp 213 drivers/misc/mic/scif/scif_main.c struct mic_bootparam *bp = sdev->dp; dp 609 drivers/misc/mic/vop/vop_main.c static void _vop_scan_devices(void __iomem *dp, struct vop_device *vpdev, dp 621 drivers/misc/mic/vop/vop_main.c d = dp + i; dp 666 drivers/misc/mic/vop/vop_main.c void __iomem *dp = vpdev->hw_ops->get_remote_dp(vpdev); dp 668 drivers/misc/mic/vop/vop_main.c if (!dp) dp 671 drivers/misc/mic/vop/vop_main.c _vop_scan_devices(dp, vpdev, remove, vpdev->dnode); dp 60 drivers/misc/mic/vop/vop_main.h void __iomem *dp; dp 27 drivers/misc/sgi-gru/grukdump.c static int gru_user_copy_handle(void __user **dp, void *s) dp 29 drivers/misc/sgi-gru/grukdump.c if (copy_to_user(*dp, s, GRU_HANDLE_BYTES)) dp 31 drivers/misc/sgi-gru/grukdump.c *dp += GRU_HANDLE_BYTES; dp 275 drivers/mtd/maps/physmap-core.c struct device_node *dp = dev->dev.of_node; dp 279 drivers/mtd/maps/physmap-core.c count = of_property_count_strings(dp, "linux,part-probe"); dp 287 drivers/mtd/maps/physmap-core.c count = of_property_read_string_array(dp, "linux,part-probe", res, dp 297 drivers/mtd/maps/physmap-core.c struct device_node *dp = dev->dev.of_node; dp 309 drivers/mtd/maps/physmap-core.c of_property_read_string(dp, "probe-type", &probe_type); dp 332 drivers/mtd/maps/physmap-core.c struct device_node *dp = dev->dev.of_node; dp 339 drivers/mtd/maps/physmap-core.c if (!dp) dp 348 drivers/mtd/maps/physmap-core.c of_property_read_string(dp, "linux,mtd-name", &mtd_name); dp 350 drivers/mtd/maps/physmap-core.c map_indirect = of_property_read_bool(dp, "no-unaligned-direct-access"); dp 352 drivers/mtd/maps/physmap-core.c err = of_property_read_u32(dp, "bank-width", &bankwidth); dp 358 drivers/mtd/maps/physmap-core.c if (of_property_read_bool(dp, "big-endian")) dp 360 drivers/mtd/maps/physmap-core.c else if (of_property_read_bool(dp, "little-endian")) dp 367 drivers/mtd/maps/physmap-core.c info->maps[i].device_node = dp; dp 369 drivers/mtd/maps/physmap-core.c err = of_flash_probe_gemini(dev, dp, &info->maps[i]); dp 373 drivers/mtd/maps/physmap-core.c err = of_flash_probe_versatile(dev, dp, &info->maps[i]); dp 51 drivers/mtd/maps/sun_uflash.c int uflash_devinit(struct platform_device *op, struct device_node *dp) dp 60 drivers/mtd/maps/sun_uflash.c dp, (unsigned long long)op->resource[0].start); dp 76 drivers/mtd/maps/sun_uflash.c up->name = of_get_property(dp, "model", NULL); dp 113 drivers/mtd/maps/sun_uflash.c struct device_node *dp = op->dev.of_node; dp 118 drivers/mtd/maps/sun_uflash.c if (!of_find_property(dp, "user", NULL)) dp 121 drivers/mtd/maps/sun_uflash.c return uflash_devinit(op, dp); dp 156 drivers/mtd/parsers/ofpart.c struct device_node *dp; dp 164 drivers/mtd/parsers/ofpart.c dp = mtd_get_of_node(master); dp 165 drivers/mtd/parsers/ofpart.c if (!dp) dp 168 drivers/mtd/parsers/ofpart.c part = of_get_property(dp, "partitions", &plen); dp 172 drivers/mtd/parsers/ofpart.c pr_warn("Device tree uses obsolete partition map binding: %pOF\n", dp); dp 180 drivers/mtd/parsers/ofpart.c names = of_get_property(dp, "partition-names", &plen); dp 87 
drivers/net/dsa/microchip/ksz_common.c const struct dsa_port *dp = dsa_to_port(dev->ds, i); dp 89 drivers/net/dsa/microchip/ksz_common.c if (!netif_carrier_ok(dp->slave)) dp 180 drivers/net/dsa/microchip/ksz_common.c const struct dsa_port *dp = dsa_to_port(ds, port); dp 188 drivers/net/dsa/microchip/ksz_common.c if (!netif_carrier_ok(dp->slave)) dp 2218 drivers/net/dsa/sja1105/sja1105_main.c sp->dp = &ds->ports[i]; dp 1307 drivers/net/ethernet/amd/sunlance.c struct device_node *dp = op->dev.of_node; dp 1384 drivers/net/ethernet/amd/sunlance.c lp->busmaster_regval = of_getintprop_default(dp, "busmaster-regval", dp 660 drivers/net/ethernet/broadcom/b44.c struct dma_desc *dp; dp 721 drivers/net/ethernet/broadcom/b44.c dp = &bp->rx_ring[dest_idx]; dp 722 drivers/net/ethernet/broadcom/b44.c dp->ctrl = cpu_to_le32(ctrl); dp 723 drivers/net/ethernet/broadcom/b44.c dp->addr = cpu_to_le32((u32) mapping + bp->dma_offset); dp 727 drivers/net/ethernet/broadcom/b44.c dest_idx * sizeof(*dp), dp 836 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c struct bnx2x_config_dcbx_params *dp = &bp->dcbx_config_params; dp 849 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->overwrite_settings == BNX2X_DCBX_OVERWRITE_SETTINGS_ENABLE) { dp 853 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c (dp->admin_dcbx_version << DCBX_CEE_VERSION_SHIFT) & dp 856 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c af->ets.enabled = (u8)dp->admin_ets_enable; dp 858 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c af->pfc.enabled = (u8)dp->admin_pfc_enable; dp 861 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_ets_configuration_tx_enable) dp 868 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_pfc_tx_enable) dp 875 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_application_priority_tx_enable) dp 882 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_ets_willing) dp 887 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_pfc_willing) dp 892 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_app_priority_willing) dp 899 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c (u8)dp->admin_configuration_bw_precentage[i]); dp 907 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c (u8)dp->admin_configuration_ets_pg[i]); dp 915 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c af->pfc.pri_en_bitmap = (u8)dp->admin_pfc_bitmap; dp 917 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c if (dp->admin_priority_app_table[i].valid) { dp 919 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c dp->admin_priority_app_table; dp 944 drivers/net/ethernet/broadcom/bnx2x/bnx2x_dcb.c af->app.default_pri = (u8)dp->admin_default_priority; dp 12344 drivers/net/ethernet/broadcom/tg3.c struct device *dp = &tp->pdev->dev; dp 12349 drivers/net/ethernet/broadcom/tg3.c !(tg3_flag(tp, WOL_CAP) && device_can_wakeup(dp))) dp 12352 drivers/net/ethernet/broadcom/tg3.c device_set_wakeup_enable(dp, wol->wolopts & WAKE_MAGIC); dp 12354 drivers/net/ethernet/broadcom/tg3.c if (device_may_wakeup(dp)) dp 165 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c unsigned int tid, bool dip, bool sip, bool dp, dp 239 drivers/net/ethernet/chelsio/cxgb4/cxgb4_filter.c (dp ? 
f->fs.nat_lport : 0) | dp 377 drivers/net/ethernet/dec/tulip/dmfe.c struct device_node *dp = pci_device_to_OF_node(pdev); dp 379 drivers/net/ethernet/dec/tulip/dmfe.c if (dp && of_get_property(dp, "local-mac-address", NULL)) { dp 1348 drivers/net/ethernet/dec/tulip/tulip_core.c struct device_node *dp; dp 1356 drivers/net/ethernet/dec/tulip/tulip_core.c dp = pci_device_to_OF_node(pdev); dp 1357 drivers/net/ethernet/dec/tulip/tulip_core.c if (!(dp && of_get_property(dp, "local-mac-address", NULL))) { dp 1613 drivers/net/ethernet/dec/tulip/tulip_core.c struct device_node *dp = pci_device_to_OF_node(pdev); dp 1622 drivers/net/ethernet/dec/tulip/tulip_core.c addr = of_get_property(dp, "local-mac-address", &len); dp 477 drivers/net/ethernet/marvell/octeontx2/af/rvu_struct.h u64 dp : 8; dp 479 drivers/net/ethernet/marvell/octeontx2/af/rvu_struct.h u64 dp : 8; dp 117 drivers/net/ethernet/netronome/nfp/abm/main.c port->vnic = alink->vnic->dp.ctrl_bar; dp 124 drivers/net/ethernet/netronome/nfp/abm/main.c port, alink->vnic->dp.netdev); dp 268 drivers/net/ethernet/netronome/nfp/abm/main.c eth_hw_addr_random(nn->dp.netdev); dp 279 drivers/net/ethernet/netronome/nfp/abm/main.c eth_hw_addr_random(nn->dp.netdev); dp 285 drivers/net/ethernet/netronome/nfp/abm/main.c eth_hw_addr_random(nn->dp.netdev); dp 295 drivers/net/ethernet/netronome/nfp/abm/main.c eth_hw_addr_random(nn->dp.netdev); dp 304 drivers/net/ethernet/netronome/nfp/abm/main.c eth_hw_addr_random(nn->dp.netdev); dp 308 drivers/net/ethernet/netronome/nfp/abm/main.c ether_addr_copy(nn->dp.netdev->dev_addr, mac_addr); dp 309 drivers/net/ethernet/netronome/nfp/abm/main.c ether_addr_copy(nn->dp.netdev->perm_addr, mac_addr); dp 348 drivers/net/ethernet/netronome/nfp/abm/main.c netif_keep_dst(nn->dp.netdev); dp 391 drivers/net/ethernet/netronome/nfp/abm/main.c for (i = 0; i < alink->vnic->dp.num_r_vecs; i++) { dp 407 drivers/net/ethernet/netronome/nfp/abm/main.c return alink->vnic->dp.num_r_vecs * 2; dp 421 drivers/net/ethernet/netronome/nfp/abm/main.c for (i = 0; i < alink->vnic->dp.num_r_vecs; i++) { dp 434 drivers/net/ethernet/netronome/nfp/bpf/cmsg.c mtu = bpf->app->ctrl->dp.mtu; dp 46 drivers/net/ethernet/netronome/nfp/bpf/main.c running = nn->dp.ctrl & NFP_NET_CFG_CTRL_BPF; dp 119 drivers/net/ethernet/netronome/nfp/bpf/main.c if (!tc_cls_can_offload_and_chain0(nn->dp.netdev, &cls_bpf->common)) dp 187 drivers/net/ethernet/netronome/nfp/bpf/main.c if (~nn->dp.ctrl & NFP_NET_CFG_CTRL_BPF) dp 435 drivers/net/ethernet/netronome/nfp/bpf/main.c if (app->ctrl->dp.mtu < nfp_bpf_ctrl_cmsg_min_mtu(bpf)) { dp 438 drivers/net/ethernet/netronome/nfp/bpf/main.c app->ctrl->dp.mtu, nfp_bpf_ctrl_cmsg_min_mtu(bpf)); dp 21 drivers/net/ethernet/netronome/nfp/bpf/main.h #define cmsg_warn(bpf, msg...) 
nn_dp_warn(&(bpf)->app->ctrl->dp, msg) dp 491 drivers/net/ethernet/netronome/nfp/bpf/offload.c pkt_off = min(prog->aux->max_pkt_offset, nn->dp.netdev->mtu); dp 513 drivers/net/ethernet/netronome/nfp/bpf/offload.c dma_addr = dma_map_single(nn->dp.dev, img, dp 516 drivers/net/ethernet/netronome/nfp/bpf/offload.c if (dma_mapping_error(nn->dp.dev, dma_addr)) { dp 530 drivers/net/ethernet/netronome/nfp/bpf/offload.c dma_unmap_single(nn->dp.dev, dma_addr, nfp_prog->prog_len * sizeof(u64), dp 543 drivers/net/ethernet/netronome/nfp/bpf/offload.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_BPF; dp 544 drivers/net/ethernet/netronome/nfp/bpf/offload.c nn_writel(nn, NFP_NET_CFG_CTRL, nn->dp.ctrl); dp 553 drivers/net/ethernet/netronome/nfp/bpf/offload.c if (!(nn->dp.ctrl & NFP_NET_CFG_CTRL_BPF)) dp 556 drivers/net/ethernet/netronome/nfp/bpf/offload.c nn->dp.ctrl &= ~NFP_NET_CFG_CTRL_BPF; dp 557 drivers/net/ethernet/netronome/nfp/bpf/offload.c nn_writel(nn, NFP_NET_CFG_CTRL, nn->dp.ctrl); dp 567 drivers/net/ethernet/netronome/nfp/bpf/offload.c if (prog && !bpf_offload_dev_match(prog, nn->dp.netdev)) dp 582 drivers/net/ethernet/netronome/nfp/bpf/offload.c if (!old_prog && nn->dp.ctrl & NFP_NET_CFG_CTRL_BPF) dp 10 drivers/net/ethernet/netronome/nfp/ccm.c #define ccm_warn(app, msg...) nn_dp_warn(&(app)->ctrl->dp, msg) dp 220 drivers/net/ethernet/netronome/nfp/ccm_mbox.c data = nn->dp.ctrl_bar + nn->tlv_caps.mbox_off + dp 232 drivers/net/ethernet/netronome/nfp/ccm_mbox.c offset = data - nn->dp.ctrl_bar; dp 238 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox oversized TLV type:%d offset:%u len:%u\n", dp 249 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox unknown TLV type:%d offset:%u len:%u\n", dp 255 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox msg too short to contain header TLV type:%d offset:%u len:%u\n", dp 264 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox request not found:%u\n", dp 271 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, dp 279 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox msg reply wrong type:%u expected:%lu\n", dp 286 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox msg reply wrong size type:%u expected:%u have:%u\n", dp 292 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox msg oversized reply type:%u max:%u have:%u\n", dp 323 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, dp 343 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, dp 458 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "time out waiting for mbox response to 0x%02x\n", dp 475 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, dp 496 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, dp 505 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, dp 525 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn_dp_warn(&nn->dp, "mailbox request queue too long\n"); dp 55 drivers/net/ethernet/netronome/nfp/crypto/tls.c nn->dp.ktls_tx = !!nn->ktls_tx_conn_cnt; dp 120 drivers/net/ethernet/netronome/nfp/crypto/tls.c nn_dp_warn(&nn->dp, "failed to %s TLS: %d\n", name, err); dp 127 drivers/net/ethernet/netronome/nfp/crypto/tls.c nn_dp_warn(&nn->dp, "failed to %s TLS, fw replied: %d\n", dp 361 drivers/net/ethernet/netronome/nfp/crypto/tls.c nn_dp_warn(&nn->dp, "failed to add TLS: %d (%d)\n", dp 373 drivers/net/ethernet/netronome/nfp/crypto/tls.c 
nn_dp_warn(&nn->dp, dp 380 drivers/net/ethernet/netronome/nfp/crypto/tls.c nn_dp_warn(&nn->dp, "FW returned NULL handle\n"); dp 483 drivers/net/ethernet/netronome/nfp/crypto/tls.c struct net_device *netdev = nn->dp.netdev; dp 423 drivers/net/ethernet/netronome/nfp/flower/main.c port->vnic = priv->nn->dp.ctrl_bar; dp 436 drivers/net/ethernet/netronome/nfp/flower/main.c port_id, port, priv->nn->dp.netdev); dp 549 drivers/net/ethernet/netronome/nfp/flower/main.c cmsg_port_id, port, priv->nn->dp.netdev); dp 608 drivers/net/ethernet/netronome/nfp/flower/main.c eth_hw_addr_random(nn->dp.netdev); dp 609 drivers/net/ethernet/netronome/nfp/flower/main.c netif_keep_dst(nn->dp.netdev); dp 615 drivers/net/ethernet/netronome/nfp/flower/main.c nn->port = nfp_port_alloc(app, NFP_PORT_INVALID, nn->dp.netdev); dp 19 drivers/net/ethernet/netronome/nfp/nfp_app_nic.c nn->port = nfp_port_alloc(app, NFP_PORT_PHYS_PORT, nn->dp.netdev); dp 41 drivers/net/ethernet/netronome/nfp/nfp_app_nic.c nfp_net_get_mac_addr(app->pf, nn->dp.netdev, nn->port); dp 31 drivers/net/ethernet/netronome/nfp/nfp_net.h if (__nn->dp.netdev) \ dp 32 drivers/net/ethernet/netronome/nfp/nfp_net.h netdev_printk(lvl, __nn->dp.netdev, fmt, ## args); \ dp 34 drivers/net/ethernet/netronome/nfp/nfp_net.h dev_printk(lvl, __nn->dp.dev, "ctrl: " fmt, ## args); \ dp 42 drivers/net/ethernet/netronome/nfp/nfp_net.h #define nn_dp_warn(dp, fmt, args...) \ dp 44 drivers/net/ethernet/netronome/nfp/nfp_net.h struct nfp_net_dp *__dp = (dp); \ dp 606 drivers/net/ethernet/netronome/nfp/nfp_net.h struct nfp_net_dp dp; dp 706 drivers/net/ethernet/netronome/nfp/nfp_net.h return readb(nn->dp.ctrl_bar + off); dp 711 drivers/net/ethernet/netronome/nfp/nfp_net.h writeb(val, nn->dp.ctrl_bar + off); dp 716 drivers/net/ethernet/netronome/nfp/nfp_net.h return readw(nn->dp.ctrl_bar + off); dp 721 drivers/net/ethernet/netronome/nfp/nfp_net.h writew(val, nn->dp.ctrl_bar + off); dp 726 drivers/net/ethernet/netronome/nfp/nfp_net.h return readl(nn->dp.ctrl_bar + off); dp 731 drivers/net/ethernet/netronome/nfp/nfp_net.h writel(val, nn->dp.ctrl_bar + off); dp 736 drivers/net/ethernet/netronome/nfp/nfp_net.h return readq(nn->dp.ctrl_bar + off); dp 741 drivers/net/ethernet/netronome/nfp/nfp_net.h writeq(val, nn->dp.ctrl_bar + off); dp 870 drivers/net/ethernet/netronome/nfp/nfp_net.h WARN_ON_ONCE(!nn->dp.netdev && nn->port); dp 871 drivers/net/ethernet/netronome/nfp/nfp_net.h return !!nn->dp.netdev; dp 876 drivers/net/ethernet/netronome/nfp/nfp_net.h return nn->dp.ctrl & NFP_NET_CFG_CTRL_ENABLE; dp 881 drivers/net/ethernet/netronome/nfp/nfp_net.h return nn->dp.netdev ? 
nn->dp.netdev->name : "ctrl"; dp 65 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static dma_addr_t nfp_net_dma_map_rx(struct nfp_net_dp *dp, void *frag) dp 67 drivers/net/ethernet/netronome/nfp/nfp_net_common.c return dma_map_single_attrs(dp->dev, frag + NFP_NET_RX_BUF_HEADROOM, dp 68 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->fl_bufsz - NFP_NET_RX_BUF_NON_DATA, dp 69 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->rx_dma_dir, DMA_ATTR_SKIP_CPU_SYNC); dp 73 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_sync_dev_rx(const struct nfp_net_dp *dp, dma_addr_t dma_addr) dp 75 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_sync_single_for_device(dp->dev, dma_addr, dp 76 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->fl_bufsz - NFP_NET_RX_BUF_NON_DATA, dp 77 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->rx_dma_dir); dp 80 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_dma_unmap_rx(struct nfp_net_dp *dp, dma_addr_t dma_addr) dp 82 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_single_attrs(dp->dev, dma_addr, dp 83 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->fl_bufsz - NFP_NET_RX_BUF_NON_DATA, dp 84 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->rx_dma_dir, DMA_ATTR_SKIP_CPU_SYNC); dp 87 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_dma_sync_cpu_rx(struct nfp_net_dp *dp, dma_addr_t dma_addr, dp 90 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_sync_single_for_cpu(dp->dev, dma_addr - NFP_NET_RX_BUF_HEADROOM, dp 91 drivers/net/ethernet/netronome/nfp/nfp_net_common.c len, dp->rx_dma_dir); dp 436 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp = &nn->dp; dp 439 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_r_vecs = nn->max_r_vecs; dp 443 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->num_rx_rings > dp->num_r_vecs || dp 444 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_tx_rings > dp->num_r_vecs) dp 445 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dev_warn(nn->dp.dev, "More rings (%d,%d) than vectors (%d).\n", dp 446 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_rx_rings, dp->num_tx_rings, dp 447 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_r_vecs); dp 449 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_rx_rings = min(dp->num_r_vecs, dp->num_rx_rings); dp 450 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_tx_rings = min(dp->num_r_vecs, dp->num_tx_rings); dp 451 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_stack_tx_rings = dp->num_tx_rings; dp 517 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_carrier_on(nn->dp.netdev); dp 518 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netdev_info(nn->dp.netdev, "NIC Link is Up\n"); dp 520 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_carrier_off(nn->dp.netdev); dp 521 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netdev_info(nn->dp.netdev, "NIC Link is Down\n"); dp 773 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_tx_csum(struct nfp_net_dp *dp, dp 782 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!(dp->ctrl & NFP_NET_CFG_CTRL_TXCSUM)) dp 801 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "partial checksum but ipv=%x!\n", iph->version); dp 813 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "partial checksum but l4 proto=%x!\n", l4_hdr); dp 826 
drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tls_tx(struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec, dp 835 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (likely(!dp->ktls_tx)) dp 865 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "tls_encrypt_skb() produced fragmented frame\n"); dp 985 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp; dp 991 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp = &nn->dp; dp 993 drivers/net/ethernet/netronome/nfp/nfp_net_common.c tx_ring = &dp->tx_rings[qidx]; dp 999 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "TX ring %d busy. wrp=%u rdp=%u\n", dp 1001 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nd_q = netdev_get_tx_queue(dp->netdev, qidx); dp 1010 drivers/net/ethernet/netronome/nfp/nfp_net_common.c skb = nfp_net_tls_tx(dp, r_vec, skb, &tls_handle, &nr_frags); dp 1021 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_addr = dma_map_single(dp->dev, skb->data, skb_headlen(skb), dp 1023 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dma_mapping_error(dp->dev, dma_addr)) dp 1049 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_csum(dp, r_vec, txbuf, txd, skb); dp 1050 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (skb_vlan_tag_present(skb) && dp->ctrl & NFP_NET_CFG_CTRL_TXVLAN) { dp 1066 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_addr = skb_frag_dma_map(dp->dev, frag, 0, dp 1068 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dma_mapping_error(dp->dev, dma_addr)) dp 1091 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nd_q = netdev_get_tx_queue(dp->netdev, tx_ring->idx); dp 1106 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_page(dp->dev, tx_ring->txbufs[wr_idx].dma_addr, dp 1115 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_single(dp->dev, tx_ring->txbufs[wr_idx].dma_addr, dp 1121 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "Failed to map DMA TX buffer\n"); dp 1140 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp = &r_vec->nfp_net->dp; dp 1176 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_single(dp->dev, tx_buf->dma_addr, dp 1184 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_page(dp->dev, tx_buf->dma_addr, dp 1204 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->netdev) dp 1207 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nd_q = netdev_get_tx_queue(dp->netdev, tx_ring->idx); dp 1271 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_reset(struct nfp_net_dp *dp, struct nfp_net_tx_ring *tx_ring) dp 1289 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_single(dp->dev, tx_buf->dma_addr, dp 1294 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_unmap_page(dp->dev, tx_buf->dma_addr, dp 1316 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (tx_ring->is_xdp || !dp->netdev) dp 1319 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nd_q = netdev_get_tx_queue(dp->netdev, tx_ring->idx); dp 1328 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (i = 0; i < nn->dp.netdev->real_num_tx_queues; i++) { dp 1339 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_calc_fl_bufsz(struct nfp_net_dp *dp) dp 1344 drivers/net/ethernet/netronome/nfp/nfp_net_common.c fl_bufsz += dp->rx_dma_off; dp 1345 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->rx_offset == NFP_NET_CFG_RX_OFFSET_DYNAMIC) dp 1348 
drivers/net/ethernet/netronome/nfp/nfp_net_common.c fl_bufsz += dp->rx_offset; dp 1349 drivers/net/ethernet/netronome/nfp/nfp_net_common.c fl_bufsz += ETH_HLEN + VLAN_HLEN * 2 + dp->mtu; dp 1375 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void *nfp_net_rx_alloc_one(struct nfp_net_dp *dp, dma_addr_t *dma_addr) dp 1379 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->xdp_prog) { dp 1380 drivers/net/ethernet/netronome/nfp/nfp_net_common.c frag = netdev_alloc_frag(dp->fl_bufsz); dp 1388 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "Failed to alloc receive page frag\n"); dp 1392 drivers/net/ethernet/netronome/nfp/nfp_net_common.c *dma_addr = nfp_net_dma_map_rx(dp, frag); dp 1393 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dma_mapping_error(dp->dev, *dma_addr)) { dp 1394 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_free_frag(frag, dp->xdp_prog); dp 1395 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "Failed to map DMA RX buffer\n"); dp 1402 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void *nfp_net_napi_alloc_one(struct nfp_net_dp *dp, dma_addr_t *dma_addr) dp 1406 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->xdp_prog) { dp 1407 drivers/net/ethernet/netronome/nfp/nfp_net_common.c frag = napi_alloc_frag(dp->fl_bufsz); dp 1419 drivers/net/ethernet/netronome/nfp/nfp_net_common.c *dma_addr = nfp_net_dma_map_rx(dp, frag); dp 1420 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dma_mapping_error(dp->dev, *dma_addr)) { dp 1421 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_free_frag(frag, dp->xdp_prog); dp 1422 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "Failed to map DMA RX buffer\n"); dp 1436 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_rx_give_one(const struct nfp_net_dp *dp, dp 1444 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_sync_dev_rx(dp, dma_addr); dp 1454 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_addr + dp->rx_dma_off); dp 1505 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_bufs_free(struct nfp_net_dp *dp, dp 1518 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_unmap_rx(dp, rx_ring->rxbufs[i].dma_addr); dp 1519 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_free_frag(rx_ring->rxbufs[i].frag, dp->xdp_prog); dp 1531 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_bufs_alloc(struct nfp_net_dp *dp, dp 1540 drivers/net/ethernet/netronome/nfp/nfp_net_common.c rxbufs[i].frag = nfp_net_rx_alloc_one(dp, &rxbufs[i].dma_addr); dp 1542 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_bufs_free(dp, rx_ring); dp 1556 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_fill_freelist(struct nfp_net_dp *dp, dp 1562 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, rx_ring->rxbufs[i].frag, dp 1588 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_rx_csum(struct nfp_net_dp *dp, dp 1595 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!(dp->netdev->features & NETIF_F_RXCSUM)) dp 1712 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(const struct nfp_net_dp *dp, struct nfp_net_r_vector *r_vec, dp 1731 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, rxbuf->frag, rxbuf->dma_addr); dp 1737 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_xdp_buf(struct 
nfp_net_dp *dp, struct nfp_net_rx_ring *rx_ring, dp 1753 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, rx_ring->r_vec, rx_ring, rxbuf, dp 1764 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, txbuf->frag, txbuf->dma_addr); dp 1772 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_sync_single_for_device(dp->dev, rxbuf->dma_addr + dma_off, dp 1805 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp = &r_vec->nfp_net->dp; dp 1816 drivers/net/ethernet/netronome/nfp/nfp_net_common.c xdp_prog = READ_ONCE(dp->xdp_prog); dp 1817 drivers/net/ethernet/netronome/nfp/nfp_net_common.c true_bufsz = xdp_prog ? PAGE_SIZE : dp->fl_bufsz; dp 1865 drivers/net/ethernet/netronome/nfp/nfp_net_common.c pkt_off = NFP_NET_RX_BUF_HEADROOM + dp->rx_dma_off; dp 1866 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->rx_offset == NFP_NET_CFG_RX_OFFSET_DYNAMIC) dp 1869 drivers/net/ethernet/netronome/nfp/nfp_net_common.c pkt_off += dp->rx_offset; dp 1879 drivers/net/ethernet/netronome/nfp/nfp_net_common.c (dp->rx_offset && meta_len > dp->rx_offset))) { dp 1880 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "oversized RX packet metadata %u\n", dp 1882 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, NULL); dp 1886 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_sync_cpu_rx(dp, rxbuf->dma_addr + meta_off, dp 1889 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->chained_metadata_format) { dp 1890 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_set_hash_desc(dp->netdev, &meta, dp 1895 drivers/net/ethernet/netronome/nfp/nfp_net_common.c end = nfp_net_parse_meta(dp->netdev, &meta, dp 1899 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "invalid RX packet metadata\n"); dp 1900 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, dp 1927 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (unlikely(!nfp_net_tx_xdp_buf(dp, rx_ring, dp 1932 drivers/net/ethernet/netronome/nfp/nfp_net_common.c trace_xdp_exception(dp->netdev, dp 1939 drivers/net/ethernet/netronome/nfp/nfp_net_common.c trace_xdp_exception(dp->netdev, xdp_prog, act); dp 1942 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, rxbuf->frag, dp 1949 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netdev = dp->netdev; dp 1951 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net *nn = netdev_priv(dp->netdev); dp 1955 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, rxbuf->frag, dp 1961 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn = netdev_priv(dp->netdev); dp 1965 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, dp 1976 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, NULL); dp 1979 drivers/net/ethernet/netronome/nfp/nfp_net_common.c new_frag = nfp_net_napi_alloc_one(dp, &new_dma_addr); dp 1981 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, skb); dp 1985 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_unmap_rx(dp, rxbuf->dma_addr); dp 1987 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, new_frag, new_dma_addr); dp 1998 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_csum(dp, r_vec, rxd, &meta, skb); dp 2074 
drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp; dp 2078 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp = &r_vec->nfp_net->dp; dp 2082 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "Driver's CTRL TX does not implement gather\n"); dp 2099 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "CTRL TX on skb without headroom\n"); dp 2108 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_addr = dma_map_single(dp->dev, skb->data, skb_headlen(skb), dp 2110 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dma_mapping_error(dp->dev, dma_addr)) dp 2141 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "Failed to DMA map TX CTRL buffer\n"); dp 2197 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_ctrl_rx_one(struct nfp_net *nn, struct nfp_net_dp *dp, dp 2226 drivers/net/ethernet/netronome/nfp/nfp_net_common.c pkt_off = NFP_NET_RX_BUF_HEADROOM + dp->rx_dma_off; dp 2227 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->rx_offset == NFP_NET_CFG_RX_OFFSET_DYNAMIC) dp 2230 drivers/net/ethernet/netronome/nfp/nfp_net_common.c pkt_off += dp->rx_offset; dp 2239 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_sync_cpu_rx(dp, rxbuf->dma_addr + meta_off, data_len); dp 2242 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(dp, "incorrect metadata for ctrl packet (%d)\n", dp 2244 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, NULL); dp 2248 drivers/net/ethernet/netronome/nfp/nfp_net_common.c skb = build_skb(rxbuf->frag, dp->fl_bufsz); dp 2250 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, NULL); dp 2253 drivers/net/ethernet/netronome/nfp/nfp_net_common.c new_frag = nfp_net_napi_alloc_one(dp, &new_dma_addr); dp 2255 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_drop(dp, r_vec, rx_ring, rxbuf, skb); dp 2259 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_unmap_rx(dp, rxbuf->dma_addr); dp 2261 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_give_one(dp, rx_ring, new_frag, new_dma_addr); dp 2275 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp = &nn->dp; dp 2278 drivers/net/ethernet/netronome/nfp/nfp_net_common.c while (nfp_ctrl_rx_one(nn, dp, r_vec, rx_ring) && budget--) dp 2297 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dp_warn(&r_vec->nfp_net->dp, dp 2327 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) { dp 2350 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp = &r_vec->nfp_net->dp; dp 2355 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_free_coherent(dp->dev, tx_ring->size, dp 2373 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_alloc(struct nfp_net_dp *dp, struct nfp_net_tx_ring *tx_ring) dp 2377 drivers/net/ethernet/netronome/nfp/nfp_net_common.c tx_ring->cnt = dp->txd_cnt; dp 2380 drivers/net/ethernet/netronome/nfp/nfp_net_common.c tx_ring->txds = dma_alloc_coherent(dp->dev, tx_ring->size, dp 2384 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netdev_warn(dp->netdev, "failed to allocate TX descriptor ring memory, requested descriptor count: %d, consider lowering descriptor count\n", dp 2394 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!tx_ring->is_xdp && dp->netdev) dp 2395 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_set_xps_queue(dp->netdev, &r_vec->affinity_mask, dp 2406 
drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_bufs_free(struct nfp_net_dp *dp, dp 2418 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dma_unmap_rx(dp, tx_ring->txbufs[i].dma_addr); dp 2424 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_bufs_alloc(struct nfp_net_dp *dp, dp 2434 drivers/net/ethernet/netronome/nfp/nfp_net_common.c txbufs[i].frag = nfp_net_rx_alloc_one(dp, &txbufs[i].dma_addr); dp 2436 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_bufs_free(dp, tx_ring); dp 2444 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static int nfp_net_tx_rings_prepare(struct nfp_net *nn, struct nfp_net_dp *dp) dp 2448 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->tx_rings = kcalloc(dp->num_tx_rings, sizeof(*dp->tx_rings), dp 2450 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->tx_rings) dp 2453 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < dp->num_tx_rings; r++) { dp 2456 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (r >= dp->num_stack_tx_rings) dp 2457 drivers/net/ethernet/netronome/nfp/nfp_net_common.c bias = dp->num_stack_tx_rings; dp 2459 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_init(&dp->tx_rings[r], &nn->r_vecs[r - bias], dp 2462 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nfp_net_tx_ring_alloc(dp, &dp->tx_rings[r])) dp 2465 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nfp_net_tx_ring_bufs_alloc(dp, &dp->tx_rings[r])) dp 2473 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_bufs_free(dp, &dp->tx_rings[r]); dp 2475 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_free(&dp->tx_rings[r]); dp 2477 drivers/net/ethernet/netronome/nfp/nfp_net_common.c kfree(dp->tx_rings); dp 2481 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_tx_rings_free(struct nfp_net_dp *dp) dp 2485 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < dp->num_tx_rings; r++) { dp 2486 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_bufs_free(dp, &dp->tx_rings[r]); dp 2487 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_free(&dp->tx_rings[r]); dp 2490 drivers/net/ethernet/netronome/nfp/nfp_net_common.c kfree(dp->tx_rings); dp 2500 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp = &r_vec->nfp_net->dp; dp 2502 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->netdev) dp 2507 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dma_free_coherent(dp->dev, rx_ring->size, dp 2525 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_alloc(struct nfp_net_dp *dp, struct nfp_net_rx_ring *rx_ring) dp 2529 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->netdev) { dp 2530 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = xdp_rxq_info_reg(&rx_ring->xdp_rxq, dp->netdev, dp 2536 drivers/net/ethernet/netronome/nfp/nfp_net_common.c rx_ring->cnt = dp->rxd_cnt; dp 2538 drivers/net/ethernet/netronome/nfp/nfp_net_common.c rx_ring->rxds = dma_alloc_coherent(dp->dev, rx_ring->size, dp 2542 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netdev_warn(dp->netdev, "failed to allocate RX descriptor ring memory, requested descriptor count: %d, consider lowering descriptor count\n", dp 2559 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static int nfp_net_rx_rings_prepare(struct nfp_net *nn, struct nfp_net_dp *dp) dp 2563 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->rx_rings 
= kcalloc(dp->num_rx_rings, sizeof(*dp->rx_rings), dp 2565 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->rx_rings) dp 2568 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < dp->num_rx_rings; r++) { dp 2569 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_init(&dp->rx_rings[r], &nn->r_vecs[r], r); dp 2571 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nfp_net_rx_ring_alloc(dp, &dp->rx_rings[r])) dp 2574 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nfp_net_rx_ring_bufs_alloc(dp, &dp->rx_rings[r])) dp 2582 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_bufs_free(dp, &dp->rx_rings[r]); dp 2584 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_free(&dp->rx_rings[r]); dp 2586 drivers/net/ethernet/netronome/nfp/nfp_net_common.c kfree(dp->rx_rings); dp 2590 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_rx_rings_free(struct nfp_net_dp *dp) dp 2594 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < dp->num_rx_rings; r++) { dp 2595 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_bufs_free(dp, &dp->rx_rings[r]); dp 2596 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_free(&dp->rx_rings[r]); dp 2599 drivers/net/ethernet/netronome/nfp/nfp_net_common.c kfree(dp->rx_rings); dp 2603 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_vector_assign_rings(struct nfp_net_dp *dp, dp 2606 drivers/net/ethernet/netronome/nfp/nfp_net_common.c r_vec->rx_ring = idx < dp->num_rx_rings ? &dp->rx_rings[idx] : NULL; dp 2608 drivers/net/ethernet/netronome/nfp/nfp_net_common.c idx < dp->num_stack_tx_rings ? &dp->tx_rings[idx] : NULL; dp 2610 drivers/net/ethernet/netronome/nfp/nfp_net_common.c r_vec->xdp_ring = idx < dp->num_tx_rings - dp->num_stack_tx_rings ? 
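Aside: the nfp_net_rx_rings_prepare()/nfp_net_tx_rings_prepare() entries above all share one shape: kcalloc() an array of rings, set each ring up in a loop, and on any failure unwind the rings already prepared before freeing the array. A minimal userspace sketch of that idiom follows; the demo_* names are hypothetical and plain calloc()/malloc() stand in for kcalloc() and the ring helpers.

#include <stdlib.h>

struct demo_ring { void *descs; };

/* Stand-in for a per-ring allocator such as nfp_net_rx_ring_alloc(). */
static int demo_ring_alloc(struct demo_ring *r)
{
	r->descs = malloc(4096);
	return r->descs ? 0 : -1;
}

/* Stand-in for the matching per-ring free helper. */
static void demo_ring_free(struct demo_ring *r)
{
	free(r->descs);
}

/*
 * Same prepare/unwind shape as the *_rings_prepare() entries above:
 * allocate the ring array, set up each ring in order, and on failure
 * tear down rings 0..r-1 in reverse before freeing the array itself.
 */
static struct demo_ring *demo_rings_prepare(unsigned int num)
{
	struct demo_ring *rings;
	unsigned int r;

	rings = calloc(num, sizeof(*rings));
	if (!rings)
		return NULL;

	for (r = 0; r < num; r++)
		if (demo_ring_alloc(&rings[r]))
			goto err_unwind;

	return rings;

err_unwind:
	while (r--)
		demo_ring_free(&rings[r]);
	free(rings);
	return NULL;
}

int main(void)
{
	struct demo_ring *rings = demo_rings_prepare(8);
	unsigned int r;

	if (rings) {
		for (r = 0; r < 8; r++)
			demo_ring_free(&rings[r]);
		free(rings);
	}
	return 0;
}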
dp 2611 drivers/net/ethernet/netronome/nfp/nfp_net_common.c &dp->tx_rings[dp->num_stack_tx_rings + idx] : NULL; dp 2621 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) dp 2622 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_napi_add(nn->dp.netdev, &r_vec->napi, dp 2632 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) dp 2654 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) dp 2707 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (i = 0; i < nn->dp.num_rx_rings; i++) dp 2713 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (i = 0; i < nn->dp.num_tx_rings; i++) dp 2755 drivers/net/ethernet/netronome/nfp/nfp_net_common.c new_ctrl = nn->dp.ctrl; dp 2772 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_rx_rings; r++) dp 2773 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_reset(&nn->dp.rx_rings[r]); dp 2774 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_tx_rings; r++) dp 2775 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_reset(&nn->dp, &nn->dp.tx_rings[r]); dp 2776 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) dp 2779 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl = new_ctrl; dp 2811 drivers/net/ethernet/netronome/nfp/nfp_net_common.c new_ctrl = nn->dp.ctrl; dp 2813 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.ctrl & NFP_NET_CFG_CTRL_RSS_ANY) { dp 2820 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.ctrl & NFP_NET_CFG_CTRL_IRQMOD) { dp 2825 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_tx_rings; r++) dp 2826 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_ring_hw_cfg_write(nn, &nn->dp.tx_rings[r], r); dp 2827 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_rx_rings; r++) dp 2828 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_hw_cfg_write(nn, &nn->dp.rx_rings[r], r); dp 2830 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_writeq(nn, NFP_NET_CFG_TXRS_ENABLE, nn->dp.num_tx_rings == 64 ? dp 2831 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 0xffffffffffffffffULL : ((u64)1 << nn->dp.num_tx_rings) - 1); dp 2833 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_writeq(nn, NFP_NET_CFG_RXRS_ENABLE, nn->dp.num_rx_rings == 64 ? 
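Aside: the NFP_NET_CFG_TXRS_ENABLE/RXRS_ENABLE writes just above use the "n == 64 ? all-ones : (1 << n) - 1" pattern because shifting a 64-bit value by 64 is undefined behaviour in C, so the all-64-rings case has to be special-cased. A small standalone illustration of that mask computation; ring_enable_mask() is a hypothetical helper name, not part of the driver.

#include <stdio.h>
#include <stdint.h>

/* Bitmask with the low n bits set, valid for 0 <= n <= 64 (mirrors the == 64 special case). */
static uint64_t ring_enable_mask(unsigned int n)
{
	return n == 64 ? 0xffffffffffffffffULL : ((uint64_t)1 << n) - 1;
}

int main(void)
{
	printf("%016llx\n", (unsigned long long)ring_enable_mask(4));  /* 000000000000000f */
	printf("%016llx\n", (unsigned long long)ring_enable_mask(64)); /* ffffffffffffffff */
	return 0;
}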
dp 2834 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 0xffffffffffffffffULL : ((u64)1 << nn->dp.num_rx_rings) - 1); dp 2836 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) dp 2837 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_write_mac_addr(nn, nn->dp.netdev->dev_addr); dp 2839 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_writel(nn, NFP_NET_CFG_MTU, nn->dp.mtu); dp 2841 drivers/net/ethernet/netronome/nfp/nfp_net_common.c bufsz = nn->dp.fl_bufsz - nn->dp.rx_dma_off - NFP_NET_RX_BUF_NON_DATA; dp 2859 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl = new_ctrl; dp 2861 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_rx_rings; r++) dp 2862 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_ring_fill_freelist(&nn->dp, &nn->dp.rx_rings[r]); dp 2867 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.ctrl & NFP_NET_CFG_CTRL_VXLAN) { dp 2870 drivers/net/ethernet/netronome/nfp/nfp_net_common.c udp_tunnel_get_rx_info(nn->dp.netdev); dp 2885 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_carrier_off(nn->dp.netdev); dp 2888 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) { dp 2893 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_tx_disable(nn->dp.netdev); dp 2904 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_rings_free(&nn->dp); dp 2905 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_rings_free(&nn->dp); dp 2907 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) dp 2945 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) { dp 2965 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) { dp 2970 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netif_tx_wake_all_queues(nn->dp.netdev); dp 2992 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) { dp 2998 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_rx_rings_prepare(nn, &nn->dp); dp 3002 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_tx_rings_prepare(nn, &nn->dp); dp 3007 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_vector_assign_rings(&nn->dp, &nn->r_vecs[r], r); dp 3012 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_rings_free(&nn->dp); dp 3014 drivers/net/ethernet/netronome/nfp/nfp_net_common.c r = nn->dp.num_r_vecs; dp 3038 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = netif_set_real_num_tx_queues(netdev, nn->dp.num_stack_tx_rings); dp 3042 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = netif_set_real_num_rx_queues(netdev, nn->dp.num_rx_rings); dp 3094 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = 0; r < nn->dp.num_r_vecs; r++) dp 3113 drivers/net/ethernet/netronome/nfp/nfp_net_common.c new_ctrl = nn->dp.ctrl; dp 3129 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (new_ctrl == nn->dp.ctrl) dp 3135 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl = new_ctrl; dp 3144 drivers/net/ethernet/netronome/nfp/nfp_net_common.c ethtool_rxfh_indir_default(i, nn->dp.num_rx_rings); dp 3147 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static void nfp_net_dp_swap(struct nfp_net *nn, struct nfp_net_dp *dp) dp 3149 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp new_dp = *dp; dp 3151 drivers/net/ethernet/netronome/nfp/nfp_net_common.c *dp = 
nn->dp; dp 3152 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp = new_dp; dp 3154 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.netdev->mtu = new_dp.mtu; dp 3156 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!netif_is_rxfh_configured(nn->dp.netdev)) dp 3160 drivers/net/ethernet/netronome/nfp/nfp_net_common.c static int nfp_net_dp_swap_enable(struct nfp_net *nn, struct nfp_net_dp *dp) dp 3165 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dp_swap(nn, dp); dp 3168 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_vector_assign_rings(&nn->dp, &nn->r_vecs[r], r); dp 3170 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = netif_set_real_num_rx_queues(nn->dp.netdev, nn->dp.num_rx_rings); dp 3174 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev->real_num_tx_queues != nn->dp.num_stack_tx_rings) { dp 3175 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = netif_set_real_num_tx_queues(nn->dp.netdev, dp 3176 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_stack_tx_rings); dp 3192 drivers/net/ethernet/netronome/nfp/nfp_net_common.c *new = nn->dp; dp 3205 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_check_config(struct nfp_net *nn, struct nfp_net_dp *dp, dp 3209 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp->xdp_prog) dp 3211 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->fl_bufsz > PAGE_SIZE) { dp 3215 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->num_tx_rings > nn->max_tx_rings) { dp 3223 drivers/net/ethernet/netronome/nfp/nfp_net_common.c int nfp_net_ring_reconfig(struct nfp_net *nn, struct nfp_net_dp *dp, dp 3228 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->fl_bufsz = nfp_net_calc_fl_bufsz(dp); dp 3230 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_stack_tx_rings = dp->num_tx_rings; dp 3231 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (dp->xdp_prog) dp 3232 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_stack_tx_rings -= dp->num_rx_rings; dp 3234 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_r_vecs = max(dp->num_rx_rings, dp->num_stack_tx_rings); dp 3236 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_check_config(nn, dp, extack); dp 3240 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!netif_running(dp->netdev)) { dp 3241 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_dp_swap(nn, dp); dp 3247 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = nn->dp.num_r_vecs; r < dp->num_r_vecs; r++) { dp 3250 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_r_vecs = r; dp 3255 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_rx_rings_prepare(nn, dp); dp 3259 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_tx_rings_prepare(nn, dp); dp 3267 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_dp_swap_enable(nn, dp); dp 3274 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err2 = nfp_net_dp_swap_enable(nn, dp); dp 3279 drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = dp->num_r_vecs - 1; r >= nn->dp.num_r_vecs; r--) dp 3282 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_rings_free(dp); dp 3283 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_tx_rings_free(dp); dp 3287 drivers/net/ethernet/netronome/nfp/nfp_net_common.c kfree(dp); dp 3292 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_rx_rings_free(dp); dp 3294 
drivers/net/ethernet/netronome/nfp/nfp_net_common.c for (r = dp->num_r_vecs - 1; r >= nn->dp.num_r_vecs; r--) dp 3296 drivers/net/ethernet/netronome/nfp/nfp_net_common.c kfree(dp); dp 3303 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp; dp 3310 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp = nfp_net_clone_dp(nn); dp 3311 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp) dp 3314 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->mtu = new_mtu; dp 3316 drivers/net/ethernet/netronome/nfp/nfp_net_common.c return nfp_net_ring_reconfig(nn, dp, NULL); dp 3419 drivers/net/ethernet/netronome/nfp/nfp_net_common.c new_ctrl = nn->dp.ctrl; dp 3478 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (new_ctrl == nn->dp.ctrl) dp 3481 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn_dbg(nn, "NIC ctrl: 0x%x -> 0x%x\n", nn->dp.ctrl, new_ctrl); dp 3487 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl = new_ctrl; dp 3553 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.is_vf || nn->vnic_no_name) dp 3575 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!(nn->dp.ctrl & NFP_NET_CFG_CTRL_VXLAN)) dp 3647 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct nfp_net_dp *dp; dp 3653 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!prog == !nn->dp.xdp_prog) { dp 3654 drivers/net/ethernet/netronome/nfp/nfp_net_common.c WRITE_ONCE(nn->dp.xdp_prog, prog); dp 3659 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp = nfp_net_clone_dp(nn); dp 3660 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!dp) dp 3663 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->xdp_prog = prog; dp 3664 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->num_tx_rings += prog ? nn->dp.num_rx_rings : -nn->dp.num_rx_rings; dp 3665 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->rx_dma_dir = prog ? DMA_BIDIRECTIONAL : DMA_FROM_DEVICE; dp 3666 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dp->rx_dma_off = prog ? XDP_PACKET_HEADROOM - nn->dp.rx_offset : 0; dp 3669 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_ring_reconfig(nn, dp, bpf->extack); dp 3767 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.is_vf ? 
"VF " : "", dp 3768 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_tx_rings, nn->max_tx_rings, dp 3769 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_rx_rings, nn->max_rx_rings); dp 3831 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.netdev = netdev; dp 3838 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.dev = &pdev->dev; dp 3839 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl_bar = ctrl_bar; dp 3845 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_tx_rings = min_t(unsigned int, dp 3847 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_rx_rings = min_t(unsigned int, max_rx_rings, dp 3850 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_r_vecs = max(nn->dp.num_tx_rings, nn->dp.num_rx_rings); dp 3851 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_r_vecs = min_t(unsigned int, dp 3852 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.num_r_vecs, num_online_cpus()); dp 3854 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.txd_cnt = NFP_NET_TX_DESCS_DEFAULT; dp 3855 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.rxd_cnt = NFP_NET_RX_DESCS_DEFAULT; dp 3864 drivers/net/ethernet/netronome/nfp/nfp_net_common.c err = nfp_net_tlv_caps_parse(&nn->pdev->dev, nn->dp.ctrl_bar, dp 3876 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) dp 3877 drivers/net/ethernet/netronome/nfp/nfp_net_common.c free_netdev(nn->dp.netdev); dp 3892 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) dp 3893 drivers/net/ethernet/netronome/nfp/nfp_net_common.c free_netdev(nn->dp.netdev); dp 3937 drivers/net/ethernet/netronome/nfp/nfp_net_common.c dev_warn(nn->dp.dev, dp 3968 drivers/net/ethernet/netronome/nfp/nfp_net_common.c struct net_device *netdev = nn->dp.netdev; dp 3970 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nfp_net_write_mac_addr(nn, nn->dp.netdev->dev_addr); dp 3972 drivers/net/ethernet/netronome/nfp/nfp_net_common.c netdev->mtu = nn->dp.mtu; dp 3986 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= nn->cap & NFP_NET_CFG_CTRL_RXCSUM_ANY; dp 3990 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_TXCSUM; dp 3994 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_GATHER; dp 3999 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= nn->cap & NFP_NET_CFG_CTRL_LSO2 ?: dp 4007 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_VXLAN; dp 4012 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_NVGRE; dp 4021 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_RXVLAN; dp 4028 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_TXVLAN; dp 4033 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_CTAG_FILTER; dp 4043 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl &= ~NFP_NET_CFG_CTRL_LSO_ANY; dp 4070 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.chained_metadata_format = nn->fw_ver.major == 4 || dp 4071 drivers/net/ethernet/netronome/nfp/nfp_net_common.c !nn->dp.netdev || dp 4077 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.chained_metadata_format && nn->fw_ver.major != 4) dp 4089 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.rx_offset = reg; dp 4091 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 
nn->dp.rx_offset = NFP_NET_RX_OFFSET; dp 4095 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!nn->dp.netdev) dp 4111 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.rx_dma_dir = DMA_FROM_DEVICE; dp 4119 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.mtu = min(nn->app->ctrl_mtu, nn->max_mtu); dp 4121 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.mtu = nn->max_mtu; dp 4123 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.mtu = NFP_NET_DEFAULT_MTU; dp 4125 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.fl_bufsz = nfp_net_calc_fl_bufsz(&nn->dp); dp 4128 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= nn->cap & NFP_NET_CFG_CTRL_CMSG_DATA; dp 4132 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= nn->cap & NFP_NET_CFG_CTRL_RSS2 ?: dp 4138 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_L2BC; dp 4143 drivers/net/ethernet/netronome/nfp/nfp_net_common.c nn->dp.ctrl |= NFP_NET_CFG_CTRL_IRQMOD; dp 4158 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (nn->dp.netdev) { dp 4172 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!nn->dp.netdev) dp 4174 drivers/net/ethernet/netronome/nfp/nfp_net_common.c return register_netdev(nn->dp.netdev); dp 4187 drivers/net/ethernet/netronome/nfp/nfp_net_common.c if (!nn->dp.netdev) dp 4190 drivers/net/ethernet/netronome/nfp/nfp_net_common.c unregister_netdev(nn->dp.netdev); dp 367 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c ring->rx_pending = nn->dp.rxd_cnt; dp 368 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c ring->tx_pending = nn->dp.txd_cnt; dp 373 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c struct nfp_net_dp *dp; dp 375 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp = nfp_net_clone_dp(nn); dp 376 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c if (!dp) dp 379 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp->rxd_cnt = rxd_cnt; dp 380 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp->txd_cnt = txd_cnt; dp 382 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c return nfp_net_ring_reconfig(nn, dp, NULL); dp 403 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c if (nn->dp.rxd_cnt == rxd_cnt && nn->dp.txd_cnt == txd_cnt) dp 407 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c nn->dp.rxd_cnt, rxd_cnt, nn->dp.txd_cnt, txd_cnt); dp 627 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c data = nfp_vnic_get_hw_stats(data, nn->dp.ctrl_bar, nn->max_r_vecs); dp 826 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c cmd->data = nn->dp.num_rx_rings; dp 876 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c writel(new_rss_cfg, nn->dp.ctrl_bar + NFP_NET_CFG_RSS_CTRL); dp 988 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c regs_buf[i] = readl(nn->dp.ctrl_bar + (i * sizeof(u32))); dp 1324 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c num_tx_rings = nn->dp.num_tx_rings; dp 1325 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c if (nn->dp.xdp_prog) dp 1326 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c num_tx_rings -= nn->dp.num_rx_rings; dp 1332 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c channel->combined_count = min(nn->dp.num_rx_rings, num_tx_rings); dp 1333 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c channel->rx_count = nn->dp.num_rx_rings - channel->combined_count; dp 1341 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c struct nfp_net_dp *dp; dp 1343 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp = 
nfp_net_clone_dp(nn); dp 1344 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c if (!dp) dp 1347 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp->num_rx_rings = total_rx; dp 1348 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp->num_tx_rings = total_tx; dp 1350 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c if (dp->xdp_prog) dp 1351 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c dp->num_tx_rings += total_rx; dp 1353 drivers/net/ethernet/netronome/nfp/nfp_net_ethtool.c return nfp_net_ring_reconfig(nn, dp, NULL); dp 128 drivers/net/ethernet/netronome/nfp/nfp_net_main.c nn->dp.is_vf = 0; dp 242 drivers/net/ethernet/netronome/nfp/nfp_net_main.c wanted_irqs += NFP_NET_NON_Q_VECTORS + nn->dp.num_r_vecs; dp 263 drivers/net/ethernet/netronome/nfp/nfp_net_main.c n = min(NFP_NET_NON_Q_VECTORS + nn->dp.num_r_vecs, dp 57 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c eth_hw_addr_random(nn->dp.netdev); dp 61 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c ether_addr_copy(nn->dp.netdev->dev_addr, mac_addr); dp 62 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c ether_addr_copy(nn->dp.netdev->perm_addr, mac_addr); dp 183 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c nn->dp.is_vf = 1; dp 241 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c nn->dp.num_r_vecs); dp 310 drivers/net/ethernet/netronome/nfp/nfp_netvf_main.c iounmap(nn->dp.ctrl_bar); dp 9196 drivers/net/ethernet/sun/niu.c struct device_node *dp; dp 9203 drivers/net/ethernet/sun/niu.c dp = np->op->dev.of_node; dp 9205 drivers/net/ethernet/sun/niu.c dp = pci_device_to_OF_node(np->pdev); dp 9207 drivers/net/ethernet/sun/niu.c phy_type = of_get_property(dp, "phy-type", &prop_len); dp 9209 drivers/net/ethernet/sun/niu.c netdev_err(dev, "%pOF: OF node lacks phy-type property\n", dp); dp 9220 drivers/net/ethernet/sun/niu.c dp, np->vpd.phy_type); dp 9224 drivers/net/ethernet/sun/niu.c mac_addr = of_get_property(dp, "local-mac-address", &prop_len); dp 9227 drivers/net/ethernet/sun/niu.c dp); dp 9232 drivers/net/ethernet/sun/niu.c dp, prop_len); dp 9236 drivers/net/ethernet/sun/niu.c netdev_err(dev, "%pOF: OF MAC address is invalid\n", dp); dp 9237 drivers/net/ethernet/sun/niu.c netdev_err(dev, "%pOF: [ %pM ]\n", dp, dev->dev_addr); dp 9241 drivers/net/ethernet/sun/niu.c model = of_get_property(dp, "model", &prop_len); dp 9246 drivers/net/ethernet/sun/niu.c if (of_find_property(dp, "hot-swappable-phy", &prop_len)) { dp 2684 drivers/net/ethernet/sun/sunhme.c struct device_node *dp = op->dev.of_node, *sbus_dp; dp 2732 drivers/net/ethernet/sun/sunhme.c addr = of_get_property(dp, "local-mac-address", &len); dp 2789 drivers/net/ethernet/sun/sunhme.c hp->hm_revision = of_getintprop_default(dp, "hm-rev", 0xff); dp 2989 drivers/net/ethernet/sun/sunhme.c struct device_node *dp; dp 3001 drivers/net/ethernet/sun/sunhme.c dp = pci_device_to_OF_node(pdev); dp 3002 drivers/net/ethernet/sun/sunhme.c snprintf(prom_name, sizeof(prom_name), "%pOFn", dp); dp 3080 drivers/net/ethernet/sun/sunhme.c (addr = of_get_property(dp, "local-mac-address", &len)) dp 3100 drivers/net/ethernet/sun/sunhme.c hp->hm_revision = of_getintprop_default(dp, "hm-rev", 0xff); dp 3271 drivers/net/ethernet/sun/sunhme.c struct device_node *dp = op->dev.of_node; dp 3272 drivers/net/ethernet/sun/sunhme.c const char *model = of_get_property(dp, "model", NULL); dp 747 drivers/net/ethernet/sun/sunqe.c static u8 qec_get_burst(struct device_node *dp) dp 754 drivers/net/ethernet/sun/sunqe.c bsizes = of_getintprop_default(dp, "burst-sizes", 0xff); dp 756 
drivers/net/ethernet/sun/sunqe.c bsizes_more = of_getintprop_default(dp->parent, "burst-sizes", 0xff); dp 129 drivers/net/ifb.c struct ifb_dev_private *dp = netdev_priv(dev); dp 130 drivers/net/ifb.c struct ifb_q_private *txp = dp->tx_private; dp 158 drivers/net/ifb.c struct ifb_dev_private *dp = netdev_priv(dev); dp 165 drivers/net/ifb.c dp->tx_private = txp; dp 197 drivers/net/ifb.c struct ifb_dev_private *dp = netdev_priv(dev); dp 198 drivers/net/ifb.c struct ifb_q_private *txp = dp->tx_private; dp 206 drivers/net/ifb.c kfree(dp->tx_private); dp 238 drivers/net/ifb.c struct ifb_dev_private *dp = netdev_priv(dev); dp 239 drivers/net/ifb.c struct ifb_q_private *txp = dp->tx_private + skb_get_queue_mapping(skb); dp 2812 drivers/net/ppp/ppp_generic.c unsigned char *dp; dp 2817 drivers/net/ppp/ppp_generic.c dp = skb->data + 2; dp 2819 drivers/net/ppp/ppp_generic.c switch (CCP_CODE(dp)) { dp 2850 drivers/net/ppp/ppp_generic.c len = CCP_LENGTH(dp); dp 2853 drivers/net/ppp/ppp_generic.c dp += CCP_HDRLEN; dp 2855 drivers/net/ppp/ppp_generic.c if (len < CCP_OPT_MINLEN || len < CCP_OPT_LENGTH(dp)) dp 2861 drivers/net/ppp/ppp_generic.c if (ppp->rcomp->decomp_init(ppp->rc_state, dp, len, dp 2870 drivers/net/ppp/ppp_generic.c if (ppp->xcomp->comp_init(ppp->xc_state, dp, len, dp 200 drivers/net/usb/asix.h void asix_rx_fixup_common_free(struct asix_common_private *dp); dp 195 drivers/net/usb/asix_common.c struct asix_common_private *dp = dev->driver_priv; dp 196 drivers/net/usb/asix_common.c struct asix_rx_fixup_info *rx = &dp->rx_fixup_info; dp 201 drivers/net/usb/asix_common.c void asix_rx_fixup_common_free(struct asix_common_private *dp) dp 205 drivers/net/usb/asix_common.c if (!dp) dp 208 drivers/net/usb/asix_common.c rx = &dp->rx_fixup_info; dp 358 drivers/net/usb/ax88172a.c struct ax88172a_private *dp = dev->driver_priv; dp 359 drivers/net/usb/ax88172a.c struct asix_rx_fixup_info *rx = &dp->rx_fixup_info; dp 1255 drivers/net/xen-netback/netback.c pending_ring_idx_t dc, dp; dp 1264 drivers/net/xen-netback/netback.c dp = queue->dealloc_prod; dp 1271 drivers/net/xen-netback/netback.c while (dc != dp) { dp 1288 drivers/net/xen-netback/netback.c } while (dp != queue->dealloc_prod); dp 831 drivers/nubus/nubus.c unsigned char dp; dp 841 drivers/nubus/nubus.c dp = *rp; dp 846 drivers/nubus/nubus.c if ((((dp >> 4) ^ dp) & 0x0F) != 0x0F) dp 850 drivers/nubus/nubus.c if (not_useful(rp, dp)) dp 854 drivers/nubus/nubus.c nubus_add_board(slot, dp); dp 31 drivers/of/pdt.c static char * __init of_pdt_build_full_name(struct device_node *dp) dp 33 drivers/of/pdt.c return build_path_component(dp); dp 39 drivers/of/pdt.c static inline void irq_trans_init(struct device_node *dp) { } dp 41 drivers/of/pdt.c static char * __init of_pdt_build_full_name(struct device_node *dp) dp 49 drivers/of/pdt.c if (!of_pdt_prom_ops->pkg2path(dp->phandle, path, sizeof(path), &len)) { dp 56 drivers/of/pdt.c name = of_get_property(dp, "name", &len); dp 147 drivers/of/pdt.c struct device_node *dp; dp 152 drivers/of/pdt.c dp = prom_early_alloc(sizeof(*dp)); dp 153 drivers/of/pdt.c of_node_init(dp); dp 154 drivers/of/pdt.c of_pdt_incr_unique_id(dp); dp 155 drivers/of/pdt.c dp->parent = parent; dp 157 drivers/of/pdt.c dp->name = of_pdt_get_one_property(node, "name"); dp 158 drivers/of/pdt.c dp->phandle = node; dp 160 drivers/of/pdt.c dp->properties = of_pdt_build_prop_list(node); dp 162 drivers/of/pdt.c dp->full_name = of_pdt_build_full_name(dp); dp 164 drivers/of/pdt.c irq_trans_init(dp); dp 166 drivers/of/pdt.c return dp; dp 173 
drivers/of/pdt.c struct device_node *dp; dp 176 drivers/of/pdt.c dp = of_pdt_create_node(node, parent); dp 177 drivers/of/pdt.c if (!dp) dp 181 drivers/of/pdt.c prev_sibling->sibling = dp; dp 184 drivers/of/pdt.c ret = dp; dp 185 drivers/of/pdt.c prev_sibling = dp; dp 187 drivers/of/pdt.c dp->child = of_pdt_build_tree(dp, of_pdt_prom_ops->getchild(node)); dp 112 drivers/phy/motorola/phy-cpcap-usb.c bool dp; dp 202 drivers/phy/motorola/phy-cpcap-usb.c s->dp = val & BIT(0); dp 157 drivers/phy/renesas/phy-rcar-gen3-usb2.c static void rcar_gen3_set_linectrl(struct rcar_gen3_chan *ch, int dp, int dm) dp 162 drivers/phy/renesas/phy-rcar-gen3-usb2.c dev_vdbg(ch->dev, "%s: %08x, %d, %d\n", __func__, val, dp, dm); dp 164 drivers/phy/renesas/phy-rcar-gen3-usb2.c if (dp) dp 34 drivers/phy/rockchip/phy-rockchip-dp.c struct rockchip_dp_phy *dp = phy_get_drvdata(phy); dp 38 drivers/phy/rockchip/phy-rockchip-dp.c ret = regmap_write(dp->grf, GRF_SOC_CON12, dp 42 drivers/phy/rockchip/phy-rockchip-dp.c dev_err(dp->dev, "Can't enable PHY power %d\n", ret); dp 46 drivers/phy/rockchip/phy-rockchip-dp.c ret = clk_prepare_enable(dp->phy_24m); dp 48 drivers/phy/rockchip/phy-rockchip-dp.c clk_disable_unprepare(dp->phy_24m); dp 50 drivers/phy/rockchip/phy-rockchip-dp.c ret = regmap_write(dp->grf, GRF_SOC_CON12, dp 79 drivers/phy/rockchip/phy-rockchip-dp.c struct rockchip_dp_phy *dp; dp 89 drivers/phy/rockchip/phy-rockchip-dp.c dp = devm_kzalloc(dev, sizeof(*dp), GFP_KERNEL); dp 90 drivers/phy/rockchip/phy-rockchip-dp.c if (!dp) dp 93 drivers/phy/rockchip/phy-rockchip-dp.c dp->dev = dev; dp 95 drivers/phy/rockchip/phy-rockchip-dp.c dp->phy_24m = devm_clk_get(dev, "24m"); dp 96 drivers/phy/rockchip/phy-rockchip-dp.c if (IS_ERR(dp->phy_24m)) { dp 98 drivers/phy/rockchip/phy-rockchip-dp.c return PTR_ERR(dp->phy_24m); dp 101 drivers/phy/rockchip/phy-rockchip-dp.c ret = clk_set_rate(dp->phy_24m, 24000000); dp 103 drivers/phy/rockchip/phy-rockchip-dp.c dev_err(dp->dev, "cannot set clock phy_24m %d\n", ret); dp 107 drivers/phy/rockchip/phy-rockchip-dp.c dp->grf = syscon_node_to_regmap(dev->parent->of_node); dp 108 drivers/phy/rockchip/phy-rockchip-dp.c if (IS_ERR(dp->grf)) { dp 110 drivers/phy/rockchip/phy-rockchip-dp.c return PTR_ERR(dp->grf); dp 113 drivers/phy/rockchip/phy-rockchip-dp.c ret = regmap_write(dp->grf, GRF_SOC_CON12, GRF_EDP_REF_CLK_SEL_INTER | dp 116 drivers/phy/rockchip/phy-rockchip-dp.c dev_err(dp->dev, "Could not config GRF edp ref clk: %d\n", ret); dp 125 drivers/phy/rockchip/phy-rockchip-dp.c phy_set_drvdata(phy, dp); dp 811 drivers/phy/rockchip/phy-rockchip-typec.c bool ufp, dp; dp 819 drivers/phy/rockchip/phy-rockchip-typec.c dp = extcon_get_state(edev, EXTCON_DISP_DP); dp 827 drivers/phy/rockchip/phy-rockchip-typec.c } else if (dp) { dp 1635 drivers/pinctrl/tegra/pinctrl-tegra124.c FUNCTION(dp), dp 1208 drivers/pinctrl/tegra/pinctrl-tegra210.c FUNCTION(dp), dp 46 drivers/platform/x86/intel_cht_int33fe.c struct fwnode_handle *dp; dp 161 drivers/platform/x86/intel_cht_int33fe.c data->dp = device_get_named_child_node(&pdev->dev, "DD02"); dp 163 drivers/platform/x86/intel_cht_int33fe.c if (!data->dp) dp 167 drivers/platform/x86/intel_cht_int33fe.c data->dp->secondary = fwnode; dp 181 drivers/platform/x86/intel_cht_int33fe.c if (data->dp) { dp 182 drivers/platform/x86/intel_cht_int33fe.c data->dp->secondary = NULL; dp 183 drivers/platform/x86/intel_cht_int33fe.c fwnode_handle_put(data->dp); dp 184 drivers/platform/x86/intel_cht_int33fe.c data->dp = NULL; dp 79 drivers/s390/crypto/zcrypt_cex2a.h unsigned 
char dp[64]; dp 92 drivers/s390/crypto/zcrypt_cex2a.h unsigned char dp[128]; dp 105 drivers/s390/crypto/zcrypt_cex2a.h unsigned char dp[256]; dp 107 drivers/s390/crypto/zcrypt_msgtype50.c unsigned char dp[64]; dp 120 drivers/s390/crypto/zcrypt_msgtype50.c unsigned char dp[128]; dp 133 drivers/s390/crypto/zcrypt_msgtype50.c unsigned char dp[256]; dp 266 drivers/s390/crypto/zcrypt_msgtype50.c unsigned char *p, *q, *dp, *dq, *u, *inp; dp 287 drivers/s390/crypto/zcrypt_msgtype50.c dp = crb1->dp + sizeof(crb1->dp) - short_len; dp 301 drivers/s390/crypto/zcrypt_msgtype50.c dp = crb2->dp + sizeof(crb2->dp) - short_len; dp 316 drivers/s390/crypto/zcrypt_msgtype50.c dp = crb3->dp + sizeof(crb3->dp) - short_len; dp 329 drivers/s390/crypto/zcrypt_msgtype50.c copy_from_user(dp, crt->bp_key + MSGTYPE_ADJUSTMENT, short_len) || dp 297 drivers/sbus/char/bbc_i2c.c struct device_node *dp; dp 329 drivers/sbus/char/bbc_i2c.c for (dp = op->dev.of_node->child; dp 330 drivers/sbus/char/bbc_i2c.c dp && entry < 8; dp 331 drivers/sbus/char/bbc_i2c.c dp = dp->sibling, entry++) { dp 334 drivers/sbus/char/bbc_i2c.c child_op = of_find_device_by_node(dp); dp 770 drivers/sbus/char/envctrl.c static void envctrl_init_adc(struct i2c_child_t *pchild, struct device_node *dp) dp 777 drivers/sbus/char/envctrl.c pos = of_get_property(dp, "channels-description", &len); dp 787 drivers/sbus/char/envctrl.c pval = of_get_property(dp, "warning-temp", NULL); dp 791 drivers/sbus/char/envctrl.c pval = of_get_property(dp, "shutdown-temp", NULL); dp 864 drivers/sbus/char/envctrl.c static void envctrl_init_i2c_child(struct device_node *dp, dp 871 drivers/sbus/char/envctrl.c pval = of_get_property(dp, "reg", &len); dp 875 drivers/sbus/char/envctrl.c pval = of_get_property(dp, "translation", &len); dp 890 drivers/sbus/char/envctrl.c pval = of_get_property(dp, "tables", &len); dp 921 drivers/sbus/char/envctrl.c pval = of_get_property(dp, "channels-in-use", &len); dp 928 drivers/sbus/char/envctrl.c envctrl_init_adc(pchild, dp); dp 943 drivers/sbus/char/envctrl.c envctrl_init_adc(pchild,dp); dp 1030 drivers/sbus/char/envctrl.c struct device_node *dp; dp 1041 drivers/sbus/char/envctrl.c dp = op->dev.of_node->child; dp 1042 drivers/sbus/char/envctrl.c while (dp) { dp 1043 drivers/sbus/char/envctrl.c if (of_node_name_eq(dp, "gpio")) { dp 1045 drivers/sbus/char/envctrl.c envctrl_init_i2c_child(dp, &(i2c_childlist[index++])); dp 1046 drivers/sbus/char/envctrl.c } else if (of_node_name_eq(dp, "adc")) { dp 1048 drivers/sbus/char/envctrl.c envctrl_init_i2c_child(dp, &(i2c_childlist[index++])); dp 1051 drivers/sbus/char/envctrl.c dp = dp->sibling; dp 164 drivers/sbus/char/flash.c struct device_node *dp = op->dev.of_node; dp 167 drivers/sbus/char/flash.c parent = dp->parent; dp 130 drivers/sbus/char/openprom.c static int opromgetprop(void __user *argp, struct device_node *dp, struct openpromio *op, int bufsize) dp 135 drivers/sbus/char/openprom.c if (!dp || dp 136 drivers/sbus/char/openprom.c !(pval = of_get_property(dp, op->oprom_array, &len)) || dp 147 drivers/sbus/char/openprom.c static int opromnxtprop(void __user *argp, struct device_node *dp, struct openpromio *op, int bufsize) dp 152 drivers/sbus/char/openprom.c if (!dp) dp 155 drivers/sbus/char/openprom.c prop = dp->properties; dp 160 drivers/sbus/char/openprom.c prop = of_find_property(dp, op->oprom_array, NULL); dp 177 drivers/sbus/char/openprom.c static int opromsetopt(struct device_node *dp, struct openpromio *op, int bufsize) dp 185 drivers/sbus/char/openprom.c static int opromnext(void 
__user *argp, unsigned int cmd, struct device_node *dp, struct openpromio *op, int bufsize, DATA *data) dp 196 drivers/sbus/char/openprom.c dp = of_find_node_by_phandle(ph); dp 197 drivers/sbus/char/openprom.c if (!dp) dp 202 drivers/sbus/char/openprom.c dp = dp->sibling; dp 206 drivers/sbus/char/openprom.c dp = dp->child; dp 218 drivers/sbus/char/openprom.c dp = of_find_node_by_path("/"); dp 222 drivers/sbus/char/openprom.c if (dp) dp 223 drivers/sbus/char/openprom.c ph = dp->phandle; dp 225 drivers/sbus/char/openprom.c data->current_node = dp; dp 232 drivers/sbus/char/openprom.c static int oprompci2node(void __user *argp, struct device_node *dp, struct openpromio *op, int bufsize, DATA *data) dp 239 drivers/sbus/char/openprom.c struct device_node *dp; dp 245 drivers/sbus/char/openprom.c dp = pci_device_to_OF_node(pdev); dp 246 drivers/sbus/char/openprom.c data->current_node = dp; dp 247 drivers/sbus/char/openprom.c *((int *)op->oprom_array) = dp->phandle; dp 258 drivers/sbus/char/openprom.c static int oprompath2node(void __user *argp, struct device_node *dp, struct openpromio *op, int bufsize, DATA *data) dp 262 drivers/sbus/char/openprom.c dp = of_find_node_by_path(op->oprom_array); dp 263 drivers/sbus/char/openprom.c if (dp) dp 264 drivers/sbus/char/openprom.c ph = dp->phandle; dp 265 drivers/sbus/char/openprom.c data->current_node = dp; dp 291 drivers/sbus/char/openprom.c struct device_node *dp) dp 312 drivers/sbus/char/openprom.c error = opromgetprop(argp, dp, opp, bufsize); dp 317 drivers/sbus/char/openprom.c error = opromnxtprop(argp, dp, opp, bufsize); dp 322 drivers/sbus/char/openprom.c error = opromsetopt(dp, opp, bufsize); dp 328 drivers/sbus/char/openprom.c error = opromnext(argp, cmd, dp, opp, bufsize, data); dp 332 drivers/sbus/char/openprom.c error = oprompci2node(argp, dp, opp, bufsize, data); dp 336 drivers/sbus/char/openprom.c error = oprompath2node(argp, dp, opp, bufsize, data); dp 365 drivers/sbus/char/openprom.c struct device_node *dp = of_find_node_by_phandle(n); dp 367 drivers/sbus/char/openprom.c if (dp) dp 368 drivers/sbus/char/openprom.c data->lastnode = dp; dp 370 drivers/sbus/char/openprom.c return dp; dp 388 drivers/sbus/char/openprom.c struct device_node *dp; dp 396 drivers/sbus/char/openprom.c dp = get_node(op.op_nodeid, data); dp 402 drivers/sbus/char/openprom.c pval = of_get_property(dp, str, &len); dp 420 drivers/sbus/char/openprom.c struct device_node *dp; dp 428 drivers/sbus/char/openprom.c dp = get_node(op.op_nodeid, data); dp 429 drivers/sbus/char/openprom.c if (!dp) dp 437 drivers/sbus/char/openprom.c prop = dp->properties; dp 439 drivers/sbus/char/openprom.c prop = of_find_property(dp, str, NULL); dp 466 drivers/sbus/char/openprom.c struct device_node *dp; dp 473 drivers/sbus/char/openprom.c dp = get_node(op.op_nodeid, data); dp 474 drivers/sbus/char/openprom.c if (!dp) dp 487 drivers/sbus/char/openprom.c err = of_set_property(dp, str, tmp, op.op_buflen); dp 497 drivers/sbus/char/openprom.c struct device_node *dp; dp 508 drivers/sbus/char/openprom.c dp = of_find_node_by_path("/"); dp 510 drivers/sbus/char/openprom.c dp = of_find_node_by_phandle(nd); dp 512 drivers/sbus/char/openprom.c if (dp) { dp 514 drivers/sbus/char/openprom.c dp = dp->sibling; dp 516 drivers/sbus/char/openprom.c dp = dp->child; dp 519 drivers/sbus/char/openprom.c if (dp) dp 520 drivers/sbus/char/openprom.c nd = dp->phandle; dp 547 drivers/scsi/aacraid/aachba.c char *dp = d; dp 549 drivers/scsi/aacraid/aachba.c *dp++ = (*sp) ? 
*sp++ : ' '; dp 44 drivers/scsi/imm.c unsigned dp:1; /* Data phase present */ dp 849 drivers/scsi/imm.c dev->dp = (x & 0x20) ? 0 : 1; dp 851 drivers/scsi/imm.c if ((dev->dp) && (dev->rd)) dp 863 drivers/scsi/imm.c if (dev->dp) { dp 874 drivers/scsi/imm.c if ((dev->dp) && (dev->rd)) { dp 81 drivers/scsi/libfc/fc_disc.c struct fc_disc_port *dp, *next; dp 122 drivers/scsi/libfc/fc_disc.c dp = kzalloc(sizeof(*dp), GFP_KERNEL); dp 123 drivers/scsi/libfc/fc_disc.c if (!dp) { dp 127 drivers/scsi/libfc/fc_disc.c dp->lp = lport; dp 128 drivers/scsi/libfc/fc_disc.c dp->port_id = ntoh24(pp->rscn_fid); dp 129 drivers/scsi/libfc/fc_disc.c list_add_tail(&dp->peers, &disc_ports); dp 148 drivers/scsi/libfc/fc_disc.c list_for_each_entry_safe(dp, next, &disc_ports, peers) { dp 149 drivers/scsi/libfc/fc_disc.c list_del(&dp->peers); dp 151 drivers/scsi/libfc/fc_disc.c redisc = fc_disc_single(lport, dp); dp 152 drivers/scsi/libfc/fc_disc.c kfree(dp); dp 665 drivers/scsi/libfc/fc_disc.c static int fc_disc_single(struct fc_lport *lport, struct fc_disc_port *dp) dp 671 drivers/scsi/libfc/fc_disc.c rdata = fc_rport_create(lport, dp->port_id); dp 416 drivers/scsi/libfc/fc_lport.c void *dp; dp 431 drivers/scsi/libfc/fc_lport.c dp = fc_frame_payload_get(fp, len); dp 432 drivers/scsi/libfc/fc_lport.c memcpy(dp, pp, len); dp 433 drivers/scsi/libfc/fc_lport.c *((__be32 *)dp) = htonl(ELS_LS_ACC << 24); dp 4457 drivers/scsi/qla2xxx/qla_init.c struct device_node *dp = pci_device_to_OF_node(pdev); dp 4461 drivers/scsi/qla2xxx/qla_init.c val = of_get_property(dp, "port-wwn", &len); dp 4465 drivers/scsi/qla2xxx/qla_init.c val = of_get_property(dp, "node-wwn", &len); dp 7062 drivers/scsi/qla2xxx/qla_init.c struct device_node *dp = pci_device_to_OF_node(pdev); dp 7066 drivers/scsi/qla2xxx/qla_init.c val = of_get_property(dp, "port-wwn", &len); dp 7070 drivers/scsi/qla2xxx/qla_init.c val = of_get_property(dp, "node-wwn", &len); dp 766 drivers/scsi/qlogicpti.c struct device_node *dp; dp 768 drivers/scsi/qlogicpti.c dp = op->dev.of_node; dp 770 drivers/scsi/qlogicpti.c qpti->scsi_id = of_getintprop_default(dp, "initiator-id", -1); dp 772 drivers/scsi/qlogicpti.c qpti->scsi_id = of_getintprop_default(dp, "scsi-initiator-id", dp 776 drivers/scsi/qlogicpti.c of_getintprop_default(dp->parent, dp 1306 drivers/scsi/qlogicpti.c struct device_node *dp = op->dev.of_node; dp 1362 drivers/scsi/qlogicpti.c fcode = of_get_property(dp, "isp-fcode", NULL); dp 1365 drivers/scsi/qlogicpti.c if (of_find_property(dp, "differential", NULL) != NULL) dp 908 drivers/scsi/scsi_debug.c struct sdebug_dev_info *dp; dp 912 drivers/scsi/scsi_debug.c list_for_each_entry(dp, &sdhp->dev_info_list, dev_list) { dp 913 drivers/scsi/scsi_debug.c if ((devip->sdbg_host == dp->sdbg_host) && dp 914 drivers/scsi/scsi_debug.c (devip->target == dp->target)) dp 915 drivers/scsi/scsi_debug.c clear_bit(SDEBUG_UA_LUNS_CHANGED, dp->uas_bm); dp 3357 drivers/scsi/scsi_debug.c struct sdebug_dev_info *dp; dp 3372 drivers/scsi/scsi_debug.c list_for_each_entry(dp, dp 3375 drivers/scsi/scsi_debug.c if (dp->target == sdp->id) { dp 3376 drivers/scsi/scsi_debug.c set_bit(SDEBUG_UA_BUS_RESET, dp->uas_bm); dp 3377 drivers/scsi/scsi_debug.c if (devip != dp) dp 3379 drivers/scsi/scsi_debug.c dp->uas_bm); dp 3384 drivers/scsi/scsi_debug.c list_for_each_entry(dp, dp 3387 drivers/scsi/scsi_debug.c if (dp->target == sdp->id) dp 3389 drivers/scsi/scsi_debug.c dp->uas_bm); dp 4882 drivers/scsi/scsi_debug.c struct sdebug_dev_info *dp; dp 4887 drivers/scsi/scsi_debug.c list_for_each_entry(dp, 
&sdhp->dev_info_list, dp 4890 drivers/scsi/scsi_debug.c dp->uas_bm); dp 4965 drivers/scsi/scsi_debug.c struct sdebug_dev_info *dp; dp 4970 drivers/scsi/scsi_debug.c list_for_each_entry(dp, &sdhp->dev_info_list, dp 4973 drivers/scsi/scsi_debug.c dp->uas_bm); dp 124 drivers/scsi/sun_esp.c struct device_node *dp; dp 126 drivers/scsi/sun_esp.c dp = op->dev.of_node; dp 127 drivers/scsi/sun_esp.c esp->scsi_id = of_getintprop_default(dp, "initiator-id", 0xff); dp 131 drivers/scsi/sun_esp.c esp->scsi_id = of_getintprop_default(dp, "scsi-initiator-id", 0xff); dp 146 drivers/scsi/sun_esp.c struct device_node *dp; dp 148 drivers/scsi/sun_esp.c dp = op->dev.of_node; dp 149 drivers/scsi/sun_esp.c if (of_find_property(dp, "differential", NULL)) dp 158 drivers/scsi/sun_esp.c struct device_node *bus_dp, *dp; dp 161 drivers/scsi/sun_esp.c dp = op->dev.of_node; dp 162 drivers/scsi/sun_esp.c bus_dp = dp->parent; dp 164 drivers/scsi/sun_esp.c fmhz = of_getintprop_default(dp, "clock-frequency", 0); dp 175 drivers/scsi/sun_esp.c struct device_node *dp; dp 178 drivers/scsi/sun_esp.c dp = op->dev.of_node; dp 179 drivers/scsi/sun_esp.c bursts = of_getintprop_default(dp, "burst-sizes", 0xff); dp 528 drivers/scsi/sun_esp.c struct device_node *dp = op->dev.of_node; dp 533 drivers/scsi/sun_esp.c if (of_node_name_eq(dp->parent, "espdma") || dp 534 drivers/scsi/sun_esp.c of_node_name_eq(dp->parent, "dma")) dp 535 drivers/scsi/sun_esp.c dma_node = dp->parent; dp 536 drivers/scsi/sun_esp.c else if (of_node_name_eq(dp, "SUNW,fas")) { dp 664 drivers/scsi/sym53c8xx_2/sym_hipd.h #define sym_set_script_dp(np, cp, dp) \ dp 667 drivers/scsi/sym53c8xx_2/sym_hipd.h cp->phys.head.lastp = cpu_to_scr(dp); \ dp 669 drivers/scsi/sym53c8xx_2/sym_hipd.h np->ccb_head.lastp = cpu_to_scr(dp); \ dp 675 drivers/scsi/sym53c8xx_2/sym_hipd.h #define sym_set_script_dp(np, cp, dp) \ dp 677 drivers/scsi/sym53c8xx_2/sym_hipd.h cp->phys.head.lastp = cpu_to_scr(dp); \ dp 131 drivers/soc/fsl/dpio/qbman-portal.c int dp, int de, int ep) dp 142 drivers/soc/fsl/dpio/qbman-portal.c dp << SWP_CFG_DP_SHIFT | dp 65 drivers/staging/fwserial/dma_fifo.h static inline void dp_mark_completed(struct dma_pending *dp) dp 67 drivers/staging/fwserial/dma_fifo.h dp->data += 1; dp 70 drivers/staging/fwserial/dma_fifo.h static inline bool dp_is_completed(struct dma_pending *dp) dp 72 drivers/staging/fwserial/dma_fifo.h return (unsigned long)dp->data & 1UL; dp 282 drivers/staging/isdn/avm/avmcard.h unsigned char *dp) dp 287 drivers/staging/isdn/avm/avmcard.h while (i-- > 0) *dp++ = b1_get_byte(base); dp 292 drivers/staging/isdn/avm/avmcard.h unsigned char *dp, unsigned int len) dp 297 drivers/staging/isdn/avm/avmcard.h b1_put_byte(base, *dp++); dp 410 drivers/staging/isdn/avm/avmcard.h unsigned char *dp) dp 426 drivers/staging/isdn/avm/avmcard.h insb(base + B1_READ, dp, FIFO_INPBSIZE); dp 427 drivers/staging/isdn/avm/avmcard.h dp += FIFO_INPBSIZE; dp 434 drivers/staging/isdn/avm/avmcard.h insb(base + B1_READ, dp, i); dp 438 drivers/staging/isdn/avm/avmcard.h dp += i; dp 442 drivers/staging/isdn/avm/avmcard.h *dp++ = b1_get_byte(base); dp 457 drivers/staging/isdn/avm/avmcard.h *dp++ = b1_get_byte(base); dp 463 drivers/staging/isdn/avm/avmcard.h unsigned char *dp, unsigned int len) dp 474 drivers/staging/isdn/avm/avmcard.h outsb(base + B1_WRITE, dp, FIFO_OUTBSIZE); dp 475 drivers/staging/isdn/avm/avmcard.h dp += FIFO_OUTBSIZE; dp 479 drivers/staging/isdn/avm/avmcard.h outsb(base + B1_WRITE, dp, i); dp 480 drivers/staging/isdn/avm/avmcard.h dp += i; dp 484 
drivers/staging/isdn/avm/avmcard.h b1_put_byte(base, *dp++); dp 491 drivers/staging/isdn/avm/avmcard.h b1_put_byte(base, *dp++); dp 155 drivers/staging/isdn/avm/b1.c unsigned char *dp; dp 159 drivers/staging/isdn/avm/b1.c dp = t4file->data; dp 163 drivers/staging/isdn/avm/b1.c if (copy_from_user(buf, dp, FWBUF_SIZE)) dp 166 drivers/staging/isdn/avm/b1.c memcpy(buf, dp, FWBUF_SIZE); dp 175 drivers/staging/isdn/avm/b1.c dp += FWBUF_SIZE; dp 179 drivers/staging/isdn/avm/b1.c if (copy_from_user(buf, dp, left)) dp 182 drivers/staging/isdn/avm/b1.c memcpy(buf, dp, left); dp 197 drivers/staging/isdn/avm/b1.c unsigned char *dp; dp 201 drivers/staging/isdn/avm/b1.c dp = config->data; dp 211 drivers/staging/isdn/avm/b1.c if (copy_from_user(buf, dp, FWBUF_SIZE)) dp 214 drivers/staging/isdn/avm/b1.c memcpy(buf, dp, FWBUF_SIZE); dp 223 drivers/staging/isdn/avm/b1.c dp += FWBUF_SIZE; dp 227 drivers/staging/isdn/avm/b1.c if (copy_from_user(buf, dp, left)) dp 230 drivers/staging/isdn/avm/b1.c memcpy(buf, dp, left); dp 178 drivers/staging/isdn/avm/b1dma.c static inline void _put_slice(void **pp, unsigned char *dp, unsigned int len) dp 183 drivers/staging/isdn/avm/b1dma.c _put_byte(pp, *dp++); dp 207 drivers/staging/isdn/avm/b1dma.c static inline u32 _get_slice(void **pp, unsigned char *dp) dp 212 drivers/staging/isdn/avm/b1dma.c while (i-- > 0) *dp++ = _get_byte(pp); dp 198 drivers/staging/isdn/avm/c4.c unsigned char *dp; dp 202 drivers/staging/isdn/avm/c4.c dp = t4file->data; dp 206 drivers/staging/isdn/avm/c4.c if (copy_from_user(&val, dp, sizeof(val))) dp 209 drivers/staging/isdn/avm/c4.c memcpy(&val, dp, sizeof(val)); dp 217 drivers/staging/isdn/avm/c4.c dp += sizeof(u32); dp 223 drivers/staging/isdn/avm/c4.c if (copy_from_user(&val, dp, left)) dp 226 drivers/staging/isdn/avm/c4.c memcpy(&val, dp, left); dp 256 drivers/staging/isdn/avm/c4.c static inline void _put_slice(void **pp, unsigned char *dp, unsigned int len) dp 261 drivers/staging/isdn/avm/c4.c _put_byte(pp, *dp++); dp 285 drivers/staging/isdn/avm/c4.c static inline u32 _get_slice(void **pp, unsigned char *dp) dp 290 drivers/staging/isdn/avm/c4.c while (i-- > 0) *dp++ = _get_byte(pp); dp 817 drivers/staging/isdn/avm/c4.c unsigned char *dp; dp 826 drivers/staging/isdn/avm/c4.c dp = config->data; dp 830 drivers/staging/isdn/avm/c4.c if (copy_from_user(val, dp, sizeof(val))) dp 833 drivers/staging/isdn/avm/c4.c memcpy(val, dp, sizeof(val)); dp 838 drivers/staging/isdn/avm/c4.c dp += sizeof(val); dp 843 drivers/staging/isdn/avm/c4.c if (copy_from_user(&val, dp, left)) dp 846 drivers/staging/isdn/avm/c4.c memcpy(&val, dp, left); dp 233 drivers/tty/hvc/hvsi_lib.c struct hvsi_data dp; dp 239 drivers/tty/hvc/hvsi_lib.c dp.hdr.type = VS_DATA_PACKET_HEADER; dp 240 drivers/tty/hvc/hvsi_lib.c dp.hdr.len = adjcount + sizeof(struct hvsi_header); dp 241 drivers/tty/hvc/hvsi_lib.c memcpy(dp.data, buf, adjcount); dp 242 drivers/tty/hvc/hvsi_lib.c rc = hvsi_send_packet(pv, &dp.hdr); dp 722 drivers/tty/n_gsm.c u8 *dp = msg->data; dp 723 drivers/tty/n_gsm.c u8 *fcs = dp + msg->len; dp 728 drivers/tty/n_gsm.c *--dp = (msg->len << 1) | EA; dp 730 drivers/tty/n_gsm.c *--dp = (msg->len >> 7); /* bits 7 - 15 */ dp 731 drivers/tty/n_gsm.c *--dp = (msg->len & 127) << 1; /* bits 0 - 6 */ dp 735 drivers/tty/n_gsm.c *--dp = msg->ctrl; dp 737 drivers/tty/n_gsm.c *--dp = (msg->addr << 2) | 2 | EA; dp 739 drivers/tty/n_gsm.c *--dp = (msg->addr << 2) | EA; dp 740 drivers/tty/n_gsm.c *fcs = gsm_fcs_add_block(INIT_FCS, dp , msg->data - dp); dp 751 drivers/tty/n_gsm.c msg->len += 
(msg->data - dp) + 1; dp 752 drivers/tty/n_gsm.c msg->data = dp; dp 793 drivers/tty/n_gsm.c u8 *dp; dp 814 drivers/tty/n_gsm.c dp = msg->data; dp 820 drivers/tty/n_gsm.c *dp++ = gsm_encode_modem(dlci); dp 823 drivers/tty/n_gsm.c WARN_ON(kfifo_out_locked(dlci->fifo, dp , len, &dlci->lock) != len); dp 847 drivers/tty/n_gsm.c u8 *dp; dp 887 drivers/tty/n_gsm.c dp = msg->data; dp 891 drivers/tty/n_gsm.c *dp++ = last << 7 | first << 6 | 1; /* EA */ dp 894 drivers/tty/n_gsm.c memcpy(dp, dlci->skb->data, len); dp 1083 drivers/tty/n_gsm.c const u8 *dp = data; dp 1086 drivers/tty/n_gsm.c while (gsm_read_ea(&addr, *dp++) == 0) { dp 1102 drivers/tty/n_gsm.c while (gsm_read_ea(&modem, *dp++) == 0) { dp 1109 drivers/tty/n_gsm.c while (gsm_read_ea(&brk, *dp++) == 0) { dp 1143 drivers/tty/n_gsm.c const u8 *dp = data; dp 1145 drivers/tty/n_gsm.c while (gsm_read_ea(&addr, *dp++) == 0) { dp 1159 drivers/tty/n_gsm.c bits = *dp; dp 2408 drivers/tty/n_gsm.c const unsigned char *dp; dp 2417 drivers/tty/n_gsm.c for (i = count, dp = cp, f = fp; i; i--, dp++) { dp 2422 drivers/tty/n_gsm.c gsm->receive(gsm, *dp); dp 2428 drivers/tty/n_gsm.c gsm->error(gsm, *dp, flags); dp 121 drivers/tty/serial/digicolor-usart.c struct digicolor_port *dp = dp 125 drivers/tty/serial/digicolor-usart.c if (!digicolor_uart_rx_empty(&dp->port)) dp 127 drivers/tty/serial/digicolor-usart.c writeb_relaxed(UA_INT_RX, dp->port.membase + UA_INTFLAG_SET); dp 129 drivers/tty/serial/digicolor-usart.c schedule_delayed_work(&dp->rx_poll_work, msecs_to_jiffies(100)); dp 257 drivers/tty/serial/digicolor-usart.c struct digicolor_port *dp = dp 274 drivers/tty/serial/digicolor-usart.c schedule_delayed_work(&dp->rx_poll_work, msecs_to_jiffies(100)); dp 281 drivers/tty/serial/digicolor-usart.c struct digicolor_port *dp = dp 285 drivers/tty/serial/digicolor-usart.c cancel_delayed_work_sync(&dp->rx_poll_work); dp 453 drivers/tty/serial/digicolor-usart.c struct digicolor_port *dp; dp 466 drivers/tty/serial/digicolor-usart.c dp = devm_kzalloc(&pdev->dev, sizeof(*dp), GFP_KERNEL); dp 467 drivers/tty/serial/digicolor-usart.c if (!dp) dp 475 drivers/tty/serial/digicolor-usart.c dp->port.mapbase = res->start; dp 476 drivers/tty/serial/digicolor-usart.c dp->port.membase = devm_ioremap_resource(&pdev->dev, res); dp 477 drivers/tty/serial/digicolor-usart.c if (IS_ERR(dp->port.membase)) dp 478 drivers/tty/serial/digicolor-usart.c return PTR_ERR(dp->port.membase); dp 483 drivers/tty/serial/digicolor-usart.c dp->port.irq = irq; dp 485 drivers/tty/serial/digicolor-usart.c dp->port.iotype = UPIO_MEM; dp 486 drivers/tty/serial/digicolor-usart.c dp->port.uartclk = clk_get_rate(uart_clk); dp 487 drivers/tty/serial/digicolor-usart.c dp->port.fifosize = 16; dp 488 drivers/tty/serial/digicolor-usart.c dp->port.dev = &pdev->dev; dp 489 drivers/tty/serial/digicolor-usart.c dp->port.ops = &digicolor_uart_ops; dp 490 drivers/tty/serial/digicolor-usart.c dp->port.line = index; dp 491 drivers/tty/serial/digicolor-usart.c dp->port.type = PORT_DIGICOLOR; dp 492 drivers/tty/serial/digicolor-usart.c spin_lock_init(&dp->port.lock); dp 494 drivers/tty/serial/digicolor-usart.c digicolor_ports[index] = &dp->port; dp 495 drivers/tty/serial/digicolor-usart.c platform_set_drvdata(pdev, &dp->port); dp 497 drivers/tty/serial/digicolor-usart.c INIT_DELAYED_WORK(&dp->rx_poll_work, digicolor_rx_poll); dp 499 drivers/tty/serial/digicolor-usart.c ret = devm_request_irq(&pdev->dev, dp->port.irq, digicolor_uart_int, 0, dp 500 drivers/tty/serial/digicolor-usart.c dev_name(&pdev->dev), &dp->port); dp 
504 drivers/tty/serial/digicolor-usart.c return uart_add_one_port(&digicolor_uart, &dp->port); dp 55 drivers/tty/serial/suncore.c int sunserial_console_match(struct console *con, struct device_node *dp, dp 63 drivers/tty/serial/suncore.c if (of_console_device != dp) dp 102 drivers/tty/serial/suncore.c struct device_node *dp; dp 111 drivers/tty/serial/suncore.c dp = of_find_node_by_path("/options"); dp 112 drivers/tty/serial/suncore.c mode = of_get_property(dp, mode_prop, NULL); dp 115 drivers/tty/serial/suncore.c of_node_put(dp); dp 1116 drivers/tty/serial/sunsab.c struct device_node *dp; dp 1120 drivers/tty/serial/sunsab.c for_each_node_by_name(dp, "se") dp 1122 drivers/tty/serial/sunsab.c for_each_node_by_name(dp, "serial") { dp 1123 drivers/tty/serial/sunsab.c if (of_device_is_compatible(dp, "sab82532")) dp 1394 drivers/tty/serial/sunsu.c static enum su_type su_get_type(struct device_node *dp) dp 1414 drivers/tty/serial/sunsu.c if (dp == match) { dp 1424 drivers/tty/serial/sunsu.c if (dp == match) { dp 1438 drivers/tty/serial/sunsu.c struct device_node *dp = op->dev.of_node; dp 1445 drivers/tty/serial/sunsu.c type = su_get_type(dp); dp 1506 drivers/tty/serial/sunsu.c if (of_node_name_eq(dp, "rsc-console") || dp 1507 drivers/tty/serial/sunsu.c of_node_name_eq(dp, "lom-console")) dp 1510 drivers/tty/serial/sunsu.c sunserial_console_match(SUNSU_CONSOLE(), dp, dp 1584 drivers/tty/serial/sunsu.c struct device_node *dp; dp 1588 drivers/tty/serial/sunsu.c for_each_node_by_name(dp, "su") { dp 1589 drivers/tty/serial/sunsu.c if (su_get_type(dp) == SU_PORT_PORT) dp 1592 drivers/tty/serial/sunsu.c for_each_node_by_name(dp, "su_pnp") { dp 1593 drivers/tty/serial/sunsu.c if (su_get_type(dp) == SU_PORT_PORT) dp 1596 drivers/tty/serial/sunsu.c for_each_node_by_name(dp, "serial") { dp 1597 drivers/tty/serial/sunsu.c if (of_device_is_compatible(dp, "su")) { dp 1598 drivers/tty/serial/sunsu.c if (su_get_type(dp) == SU_PORT_PORT) dp 1602 drivers/tty/serial/sunsu.c for_each_node_by_type(dp, "serial") { dp 1603 drivers/tty/serial/sunsu.c if (of_device_is_compatible(dp, "su")) { dp 1604 drivers/tty/serial/sunsu.c if (su_get_type(dp) == SU_PORT_PORT) dp 1553 drivers/tty/serial/sunzilog.c struct device_node *dp; dp 1558 drivers/tty/serial/sunzilog.c for_each_node_by_name(dp, "zs") { dp 1560 drivers/tty/serial/sunzilog.c if (of_find_property(dp, "keyboard", NULL)) dp 92 drivers/usb/host/isp116x-hcd.c u8 *dp = (u8 *) buf; dp 104 drivers/usb/host/isp116x-hcd.c w = *dp++; dp 105 drivers/usb/host/isp116x-hcd.c w |= *dp++ << 8; dp 109 drivers/usb/host/isp116x-hcd.c isp116x_write_data16(isp116x, (u16) * dp); dp 129 drivers/usb/host/isp116x-hcd.c u8 *dp = (u8 *) buf; dp 142 drivers/usb/host/isp116x-hcd.c *dp++ = w & 0xff; dp 143 drivers/usb/host/isp116x-hcd.c *dp++ = (w >> 8) & 0xff; dp 147 drivers/usb/host/isp116x-hcd.c *dp = 0xff & isp116x_read_data16(isp116x); dp 644 drivers/usb/host/isp1362.h u8 *dp = buf; dp 654 drivers/usb/host/isp1362.h readsl(isp1362_hcd->data_reg, dp, len >> 2); dp 655 drivers/usb/host/isp1362.h dp += len & ~3; dp 661 drivers/usb/host/isp1362.h insw((unsigned long)isp1362_hcd->data_reg, dp, len >> 1); dp 662 drivers/usb/host/isp1362.h dp += len & ~1; dp 670 drivers/usb/host/isp1362.h (u8)data, (u32)dp); dp 671 drivers/usb/host/isp1362.h *dp = (u8)data; dp 677 drivers/usb/host/isp1362.h u8 *dp = buf; dp 683 drivers/usb/host/isp1362.h if ((unsigned long)dp & 0x1) { dp 686 drivers/usb/host/isp1362.h data = *dp++; dp 687 drivers/usb/host/isp1362.h data |= *dp++ << 8; dp 691 
drivers/usb/host/isp1362.h isp1362_write_data16(isp1362_hcd, *dp); dp 699 drivers/usb/host/isp1362.h writesl(isp1362_hcd->data_reg, dp, len >> 2); dp 700 drivers/usb/host/isp1362.h dp += len & ~3; dp 706 drivers/usb/host/isp1362.h outsw((unsigned long)isp1362_hcd->data_reg, dp, len >> 1); dp 707 drivers/usb/host/isp1362.h dp += len & ~1; dp 716 drivers/usb/host/isp1362.h data = (u16)*dp; dp 718 drivers/usb/host/isp1362.h data, (u32)dp); dp 1188 drivers/usb/host/max3421-hcd.c char ubuf[512], *dp, *end; dp 1197 drivers/usb/host/max3421-hcd.c dp = ubuf; dp 1198 drivers/usb/host/max3421-hcd.c end = dp + sizeof(ubuf); dp 1199 drivers/usb/host/max3421-hcd.c *dp = '\0'; dp 1201 drivers/usb/host/max3421-hcd.c ret = snprintf(dp, end - dp, " %p(%d.%s %d/%d)", urb, dp 1206 drivers/usb/host/max3421-hcd.c if (ret < 0 || ret >= end - dp) dp 1208 drivers/usb/host/max3421-hcd.c dp += ret; dp 1290 drivers/usb/host/max3421-hcd.c char sbuf[16 * 16], *dp, *end; dp 1294 drivers/usb/host/max3421-hcd.c dp = sbuf; dp 1296 drivers/usb/host/max3421-hcd.c *dp = '\0'; dp 1298 drivers/usb/host/max3421-hcd.c int ret = snprintf(dp, end - dp, " %lu", dp 1300 drivers/usb/host/max3421-hcd.c if (ret < 0 || ret >= end - dp) dp 1302 drivers/usb/host/max3421-hcd.c dp += ret; dp 206 drivers/usb/isp1760/isp1760-if.c struct device_node *dp = pdev->dev.of_node; dp 209 drivers/usb/isp1760/isp1760-if.c if (of_device_is_compatible(dp, "nxp,usb-isp1761")) dp 213 drivers/usb/isp1760/isp1760-if.c of_property_read_u32(dp, "bus-width", &bus_width); dp 217 drivers/usb/isp1760/isp1760-if.c if (of_property_read_bool(dp, "port1-otg")) dp 220 drivers/usb/isp1760/isp1760-if.c if (of_property_read_bool(dp, "analog-oc")) dp 223 drivers/usb/isp1760/isp1760-if.c if (of_property_read_bool(dp, "dack-polarity")) dp 226 drivers/usb/isp1760/isp1760-if.c if (of_property_read_bool(dp, "dreq-polarity")) dp 468 drivers/usb/mon/mon_bin.c struct mon_bin_isodesc *dp; dp 473 drivers/usb/mon/mon_bin.c dp = (struct mon_bin_isodesc *) dp 475 drivers/usb/mon/mon_bin.c dp->iso_status = fp->status; dp 476 drivers/usb/mon/mon_bin.c dp->iso_off = fp->offset; dp 477 drivers/usb/mon/mon_bin.c dp->iso_len = (ev_type == 'S') ? fp->length : fp->actual_length; dp 478 drivers/usb/mon/mon_bin.c dp->_pad = 0; dp 199 drivers/usb/mon/mon_text.c struct mon_iso_desc *dp; dp 236 drivers/usb/mon/mon_text.c dp = ep->isodesc; dp 238 drivers/usb/mon/mon_text.c dp->status = fp->status; dp 239 drivers/usb/mon/mon_text.c dp->offset = fp->offset; dp 240 drivers/usb/mon/mon_text.c dp->length = (ev_type == 'S') ? 
dp 243 drivers/usb/mon/mon_text.c dp++; dp 593 drivers/usb/mon/mon_text.c const struct mon_iso_desc *dp; dp 602 drivers/usb/mon/mon_text.c dp = ep->isodesc; dp 605 drivers/usb/mon/mon_text.c " %d:%u:%u", dp->status, dp->offset, dp->length); dp 606 drivers/usb/mon/mon_text.c dp++; dp 66 drivers/usb/typec/altmodes/displayport.c static int dp_altmode_notify(struct dp_altmode *dp) dp 68 drivers/usb/typec/altmodes/displayport.c u8 state = get_count_order(DP_CONF_GET_PIN_ASSIGN(dp->data.conf)); dp 70 drivers/usb/typec/altmodes/displayport.c return typec_altmode_notify(dp->alt, TYPEC_MODAL_STATE(state), dp 71 drivers/usb/typec/altmodes/displayport.c &dp->data); dp 74 drivers/usb/typec/altmodes/displayport.c static int dp_altmode_configure(struct dp_altmode *dp, u8 con) dp 84 drivers/usb/typec/altmodes/displayport.c pin_assign = DP_CAP_UFP_D_PIN_ASSIGN(dp->alt->vdo) & dp 85 drivers/usb/typec/altmodes/displayport.c DP_CAP_DFP_D_PIN_ASSIGN(dp->port->vdo); dp 90 drivers/usb/typec/altmodes/displayport.c pin_assign = DP_CAP_DFP_D_PIN_ASSIGN(dp->alt->vdo) & dp 91 drivers/usb/typec/altmodes/displayport.c DP_CAP_UFP_D_PIN_ASSIGN(dp->port->vdo); dp 98 drivers/usb/typec/altmodes/displayport.c if (!DP_CONF_GET_PIN_ASSIGN(dp->data.conf)) { dp 100 drivers/usb/typec/altmodes/displayport.c if (dp->data.status & DP_STATUS_PREFER_MULTI_FUNC && dp 112 drivers/usb/typec/altmodes/displayport.c dp->data.conf = conf; dp 117 drivers/usb/typec/altmodes/displayport.c static int dp_altmode_status_update(struct dp_altmode *dp) dp 119 drivers/usb/typec/altmodes/displayport.c bool configured = !!DP_CONF_GET_PIN_ASSIGN(dp->data.conf); dp 120 drivers/usb/typec/altmodes/displayport.c u8 con = DP_STATUS_CONNECTION(dp->data.status); dp 123 drivers/usb/typec/altmodes/displayport.c if (configured && (dp->data.status & DP_STATUS_SWITCH_TO_USB)) { dp 124 drivers/usb/typec/altmodes/displayport.c dp->data.conf = 0; dp 125 drivers/usb/typec/altmodes/displayport.c dp->state = DP_STATE_CONFIGURE; dp 126 drivers/usb/typec/altmodes/displayport.c } else if (dp->data.status & DP_STATUS_EXIT_DP_MODE) { dp 127 drivers/usb/typec/altmodes/displayport.c dp->state = DP_STATE_EXIT; dp 128 drivers/usb/typec/altmodes/displayport.c } else if (!(con & DP_CONF_CURRENTLY(dp->data.conf))) { dp 129 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_configure(dp, con); dp 131 drivers/usb/typec/altmodes/displayport.c dp->state = DP_STATE_CONFIGURE; dp 137 drivers/usb/typec/altmodes/displayport.c static int dp_altmode_configured(struct dp_altmode *dp) dp 141 drivers/usb/typec/altmodes/displayport.c sysfs_notify(&dp->alt->dev.kobj, "displayport", "configuration"); dp 143 drivers/usb/typec/altmodes/displayport.c if (!dp->data.conf) dp 144 drivers/usb/typec/altmodes/displayport.c return typec_altmode_notify(dp->alt, TYPEC_STATE_USB, dp 145 drivers/usb/typec/altmodes/displayport.c &dp->data); dp 147 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_notify(dp); dp 151 drivers/usb/typec/altmodes/displayport.c sysfs_notify(&dp->alt->dev.kobj, "displayport", "pin_assignment"); dp 156 drivers/usb/typec/altmodes/displayport.c static int dp_altmode_configure_vdm(struct dp_altmode *dp, u32 conf) dp 158 drivers/usb/typec/altmodes/displayport.c u32 header = DP_HEADER(dp, DP_CMD_CONFIGURE); dp 161 drivers/usb/typec/altmodes/displayport.c ret = typec_altmode_notify(dp->alt, TYPEC_STATE_SAFE, &dp->data); dp 163 drivers/usb/typec/altmodes/displayport.c dev_err(&dp->alt->dev, dp 168 drivers/usb/typec/altmodes/displayport.c ret = typec_altmode_vdm(dp->alt, header, 
&conf, 2); dp 170 drivers/usb/typec/altmodes/displayport.c if (DP_CONF_GET_PIN_ASSIGN(dp->data.conf)) dp 171 drivers/usb/typec/altmodes/displayport.c dp_altmode_notify(dp); dp 173 drivers/usb/typec/altmodes/displayport.c typec_altmode_notify(dp->alt, TYPEC_STATE_USB, dp 174 drivers/usb/typec/altmodes/displayport.c &dp->data); dp 182 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = container_of(work, struct dp_altmode, work); dp 187 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 189 drivers/usb/typec/altmodes/displayport.c switch (dp->state) { dp 191 drivers/usb/typec/altmodes/displayport.c ret = typec_altmode_enter(dp->alt); dp 193 drivers/usb/typec/altmodes/displayport.c dev_err(&dp->alt->dev, "failed to enter mode\n"); dp 196 drivers/usb/typec/altmodes/displayport.c header = DP_HEADER(dp, DP_CMD_STATUS_UPDATE); dp 198 drivers/usb/typec/altmodes/displayport.c ret = typec_altmode_vdm(dp->alt, header, &vdo, 2); dp 200 drivers/usb/typec/altmodes/displayport.c dev_err(&dp->alt->dev, dp 205 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_configure_vdm(dp, dp->data.conf); dp 207 drivers/usb/typec/altmodes/displayport.c dev_err(&dp->alt->dev, dp 211 drivers/usb/typec/altmodes/displayport.c if (typec_altmode_exit(dp->alt)) dp 212 drivers/usb/typec/altmodes/displayport.c dev_err(&dp->alt->dev, "Exit Mode Failed!\n"); dp 218 drivers/usb/typec/altmodes/displayport.c dp->state = DP_STATE_IDLE; dp 220 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 225 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = typec_altmode_get_drvdata(alt); dp 228 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 230 drivers/usb/typec/altmodes/displayport.c old_state = dp->state; dp 231 drivers/usb/typec/altmodes/displayport.c dp->data.status = vdo; dp 237 drivers/usb/typec/altmodes/displayport.c if (dp_altmode_status_update(dp)) dp 240 drivers/usb/typec/altmodes/displayport.c if (dp_altmode_notify(dp)) dp 243 drivers/usb/typec/altmodes/displayport.c if (old_state == DP_STATE_IDLE && dp->state != DP_STATE_IDLE) dp 244 drivers/usb/typec/altmodes/displayport.c schedule_work(&dp->work); dp 246 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 252 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = typec_altmode_get_drvdata(alt); dp 257 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 259 drivers/usb/typec/altmodes/displayport.c if (dp->state != DP_STATE_IDLE) { dp 268 drivers/usb/typec/altmodes/displayport.c dp->state = DP_STATE_UPDATE; dp 271 drivers/usb/typec/altmodes/displayport.c dp->data.status = 0; dp 272 drivers/usb/typec/altmodes/displayport.c dp->data.conf = 0; dp 275 drivers/usb/typec/altmodes/displayport.c dp->data.status = *vdo; dp 276 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_status_update(dp); dp 279 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_configured(dp); dp 288 drivers/usb/typec/altmodes/displayport.c dp->data.conf = 0; dp 289 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_configured(dp); dp 299 drivers/usb/typec/altmodes/displayport.c if (dp->state != DP_STATE_IDLE) dp 300 drivers/usb/typec/altmodes/displayport.c schedule_work(&dp->work); dp 303 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 328 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = dev_get_drvdata(dev); dp 338 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 340 drivers/usb/typec/altmodes/displayport.c if 
(dp->state != DP_STATE_IDLE) { dp 345 drivers/usb/typec/altmodes/displayport.c cap = DP_CAP_CAPABILITY(dp->alt->vdo); dp 353 drivers/usb/typec/altmodes/displayport.c conf = dp->data.conf & ~DP_CONF_DUAL_D; dp 356 drivers/usb/typec/altmodes/displayport.c if (dp->alt->active) { dp 357 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_configure_vdm(dp, conf); dp 362 drivers/usb/typec/altmodes/displayport.c dp->data.conf = conf; dp 365 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 373 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = dev_get_drvdata(dev); dp 379 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 381 drivers/usb/typec/altmodes/displayport.c cap = DP_CAP_CAPABILITY(dp->alt->vdo); dp 382 drivers/usb/typec/altmodes/displayport.c cur = DP_CONF_CURRENTLY(dp->data.conf); dp 394 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 414 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = dev_get_drvdata(dev); dp 426 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 428 drivers/usb/typec/altmodes/displayport.c if (conf & dp->data.conf) dp 431 drivers/usb/typec/altmodes/displayport.c if (dp->state != DP_STATE_IDLE) { dp 436 drivers/usb/typec/altmodes/displayport.c if (DP_CONF_CURRENTLY(dp->data.conf) == DP_CONF_DFP_D) dp 437 drivers/usb/typec/altmodes/displayport.c assignments = DP_CAP_UFP_D_PIN_ASSIGN(dp->alt->vdo); dp 439 drivers/usb/typec/altmodes/displayport.c assignments = DP_CAP_DFP_D_PIN_ASSIGN(dp->alt->vdo); dp 446 drivers/usb/typec/altmodes/displayport.c conf |= dp->data.conf & ~DP_CONF_PIN_ASSIGNEMENT_MASK; dp 449 drivers/usb/typec/altmodes/displayport.c if (dp->alt->active && DP_CONF_CURRENTLY(dp->data.conf)) { dp 450 drivers/usb/typec/altmodes/displayport.c ret = dp_altmode_configure_vdm(dp, conf); dp 455 drivers/usb/typec/altmodes/displayport.c dp->data.conf = conf; dp 458 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 466 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = dev_get_drvdata(dev); dp 472 drivers/usb/typec/altmodes/displayport.c mutex_lock(&dp->lock); dp 474 drivers/usb/typec/altmodes/displayport.c cur = get_count_order(DP_CONF_GET_PIN_ASSIGN(dp->data.conf)); dp 476 drivers/usb/typec/altmodes/displayport.c if (DP_CONF_CURRENTLY(dp->data.conf) == DP_CONF_DFP_D) dp 477 drivers/usb/typec/altmodes/displayport.c assignments = DP_CAP_UFP_D_PIN_ASSIGN(dp->alt->vdo); dp 479 drivers/usb/typec/altmodes/displayport.c assignments = DP_CAP_DFP_D_PIN_ASSIGN(dp->alt->vdo); dp 492 drivers/usb/typec/altmodes/displayport.c mutex_unlock(&dp->lock); dp 513 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp; dp 529 drivers/usb/typec/altmodes/displayport.c dp = devm_kzalloc(&alt->dev, sizeof(*dp), GFP_KERNEL); dp 530 drivers/usb/typec/altmodes/displayport.c if (!dp) dp 533 drivers/usb/typec/altmodes/displayport.c INIT_WORK(&dp->work, dp_altmode_work); dp 534 drivers/usb/typec/altmodes/displayport.c mutex_init(&dp->lock); dp 535 drivers/usb/typec/altmodes/displayport.c dp->port = port; dp 536 drivers/usb/typec/altmodes/displayport.c dp->alt = alt; dp 541 drivers/usb/typec/altmodes/displayport.c typec_altmode_set_drvdata(alt, dp); dp 543 drivers/usb/typec/altmodes/displayport.c dp->state = DP_STATE_ENTER; dp 544 drivers/usb/typec/altmodes/displayport.c schedule_work(&dp->work); dp 552 drivers/usb/typec/altmodes/displayport.c struct dp_altmode *dp = typec_altmode_get_drvdata(alt); dp 555 drivers/usb/typec/altmodes/displayport.c 
cancel_work_sync(&dp->work); dp 50 drivers/usb/typec/ucsi/displayport.c struct ucsi_dp *dp = typec_altmode_get_drvdata(alt); dp 55 drivers/usb/typec/ucsi/displayport.c mutex_lock(&dp->con->lock); dp 57 drivers/usb/typec/ucsi/displayport.c if (!dp->override && dp->initialized) { dp 62 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 66 drivers/usb/typec/ucsi/displayport.c UCSI_CMD_GET_CURRENT_CAM(ctrl, dp->con->num); dp 67 drivers/usb/typec/ucsi/displayport.c ret = ucsi_send_command(dp->con->ucsi, &ctrl, &cur, sizeof(cur)); dp 69 drivers/usb/typec/ucsi/displayport.c if (dp->con->ucsi->ppm->data->version > 0x0100) { dp 70 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 77 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 78 drivers/usb/typec/ucsi/displayport.c if (dp->con->port_altmode[cur] == alt) dp 89 drivers/usb/typec/ucsi/displayport.c dp->header = VDO(USB_TYPEC_DP_SID, 1, CMD_ENTER_MODE); dp 90 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_OPOS(USB_TYPEC_DP_MODE); dp 91 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_ACK); dp 93 drivers/usb/typec/ucsi/displayport.c dp->vdo_data = NULL; dp 94 drivers/usb/typec/ucsi/displayport.c dp->vdo_size = 1; dp 96 drivers/usb/typec/ucsi/displayport.c schedule_work(&dp->work); dp 98 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 105 drivers/usb/typec/ucsi/displayport.c struct ucsi_dp *dp = typec_altmode_get_drvdata(alt); dp 109 drivers/usb/typec/ucsi/displayport.c mutex_lock(&dp->con->lock); dp 111 drivers/usb/typec/ucsi/displayport.c if (!dp->override) { dp 120 drivers/usb/typec/ucsi/displayport.c ctrl.raw_cmd = UCSI_CMD_SET_NEW_CAM(dp->con->num, 0, dp->offset, 0); dp 121 drivers/usb/typec/ucsi/displayport.c ret = ucsi_send_command(dp->con->ucsi, &ctrl, NULL, 0); dp 125 drivers/usb/typec/ucsi/displayport.c dp->header = VDO(USB_TYPEC_DP_SID, 1, CMD_EXIT_MODE); dp 126 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_OPOS(USB_TYPEC_DP_MODE); dp 127 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_ACK); dp 129 drivers/usb/typec/ucsi/displayport.c dp->vdo_data = NULL; dp 130 drivers/usb/typec/ucsi/displayport.c dp->vdo_size = 1; dp 132 drivers/usb/typec/ucsi/displayport.c schedule_work(&dp->work); dp 135 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 144 drivers/usb/typec/ucsi/displayport.c static int ucsi_displayport_status_update(struct ucsi_dp *dp) dp 146 drivers/usb/typec/ucsi/displayport.c u32 cap = dp->alt->vdo; dp 148 drivers/usb/typec/ucsi/displayport.c dp->data.status = DP_STATUS_ENABLED; dp 155 drivers/usb/typec/ucsi/displayport.c dp->data.status |= DP_STATUS_CON_UFP_D; dp 158 drivers/usb/typec/ucsi/displayport.c dp->data.status |= DP_STATUS_PREFER_MULTI_FUNC; dp 160 drivers/usb/typec/ucsi/displayport.c dp->data.status |= DP_STATUS_CON_DFP_D; dp 163 drivers/usb/typec/ucsi/displayport.c dp->data.status |= DP_STATUS_PREFER_MULTI_FUNC; dp 166 drivers/usb/typec/ucsi/displayport.c dp->vdo_data = &dp->data.status; dp 167 drivers/usb/typec/ucsi/displayport.c dp->vdo_size = 2; dp 172 drivers/usb/typec/ucsi/displayport.c static int ucsi_displayport_configure(struct ucsi_dp *dp) dp 174 drivers/usb/typec/ucsi/displayport.c u32 pins = DP_CONF_GET_PIN_ASSIGN(dp->data.conf); dp 177 drivers/usb/typec/ucsi/displayport.c if (!dp->override) dp 180 drivers/usb/typec/ucsi/displayport.c ctrl.raw_cmd = UCSI_CMD_SET_NEW_CAM(dp->con->num, 1, dp->offset, pins); dp 182 drivers/usb/typec/ucsi/displayport.c 
return ucsi_send_command(dp->con->ucsi, &ctrl, NULL, 0); dp 188 drivers/usb/typec/ucsi/displayport.c struct ucsi_dp *dp = typec_altmode_get_drvdata(alt); dp 192 drivers/usb/typec/ucsi/displayport.c mutex_lock(&dp->con->lock); dp 194 drivers/usb/typec/ucsi/displayport.c if (!dp->override && dp->initialized) { dp 199 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 205 drivers/usb/typec/ucsi/displayport.c dp->header = VDO(USB_TYPEC_DP_SID, 1, cmd); dp 206 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_OPOS(USB_TYPEC_DP_MODE); dp 210 drivers/usb/typec/ucsi/displayport.c if (ucsi_displayport_status_update(dp)) dp 211 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_NAK); dp 213 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_ACK); dp 216 drivers/usb/typec/ucsi/displayport.c dp->data.conf = *data; dp 217 drivers/usb/typec/ucsi/displayport.c if (ucsi_displayport_configure(dp)) { dp 218 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_NAK); dp 220 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_ACK); dp 221 drivers/usb/typec/ucsi/displayport.c if (dp->initialized) dp 222 drivers/usb/typec/ucsi/displayport.c ucsi_altmode_update_active(dp->con); dp 224 drivers/usb/typec/ucsi/displayport.c dp->initialized = true; dp 228 drivers/usb/typec/ucsi/displayport.c dp->header |= VDO_CMDT(CMDT_RSP_ACK); dp 232 drivers/usb/typec/ucsi/displayport.c schedule_work(&dp->work); dp 238 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 251 drivers/usb/typec/ucsi/displayport.c struct ucsi_dp *dp = container_of(work, struct ucsi_dp, work); dp 254 drivers/usb/typec/ucsi/displayport.c mutex_lock(&dp->con->lock); dp 256 drivers/usb/typec/ucsi/displayport.c ret = typec_altmode_vdm(dp->alt, dp->header, dp 257 drivers/usb/typec/ucsi/displayport.c dp->vdo_data, dp->vdo_size); dp 259 drivers/usb/typec/ucsi/displayport.c dev_err(&dp->alt->dev, "VDM 0x%x failed\n", dp->header); dp 261 drivers/usb/typec/ucsi/displayport.c dp->vdo_data = NULL; dp 262 drivers/usb/typec/ucsi/displayport.c dp->vdo_size = 0; dp 263 drivers/usb/typec/ucsi/displayport.c dp->header = 0; dp 265 drivers/usb/typec/ucsi/displayport.c mutex_unlock(&dp->con->lock); dp 270 drivers/usb/typec/ucsi/displayport.c struct ucsi_dp *dp; dp 275 drivers/usb/typec/ucsi/displayport.c dp = typec_altmode_get_drvdata(alt); dp 276 drivers/usb/typec/ucsi/displayport.c if (!dp) dp 279 drivers/usb/typec/ucsi/displayport.c dp->data.conf = 0; dp 280 drivers/usb/typec/ucsi/displayport.c dp->data.status = 0; dp 281 drivers/usb/typec/ucsi/displayport.c dp->initialized = false; dp 291 drivers/usb/typec/ucsi/displayport.c struct ucsi_dp *dp; dp 308 drivers/usb/typec/ucsi/displayport.c dp = devm_kzalloc(&alt->dev, sizeof(*dp), GFP_KERNEL); dp 309 drivers/usb/typec/ucsi/displayport.c if (!dp) { dp 315 drivers/usb/typec/ucsi/displayport.c INIT_WORK(&dp->work, ucsi_displayport_work); dp 316 drivers/usb/typec/ucsi/displayport.c dp->override = override; dp 317 drivers/usb/typec/ucsi/displayport.c dp->offset = offset; dp 318 drivers/usb/typec/ucsi/displayport.c dp->con = con; dp 319 drivers/usb/typec/ucsi/displayport.c dp->alt = alt; dp 322 drivers/usb/typec/ucsi/displayport.c typec_altmode_set_drvdata(alt, dp); dp 50 drivers/video/fbdev/asiliantfb.c #define mm_write_ind(num, val, ap, dp) do { \ dp 51 drivers/video/fbdev/asiliantfb.c writeb((num), mmio_base + (ap)); writeb((val), mmio_base + (dp)); \ dp 91 drivers/video/fbdev/asiliantfb.c static int 
asiliantfb_pci_init(struct pci_dev *dp, const struct pci_device_id *); dp 538 drivers/video/fbdev/asiliantfb.c static int asiliantfb_pci_init(struct pci_dev *dp, dp 545 drivers/video/fbdev/asiliantfb.c if ((dp->resource[0].flags & IORESOURCE_MEM) == 0) dp 547 drivers/video/fbdev/asiliantfb.c addr = pci_resource_start(dp, 0); dp 548 drivers/video/fbdev/asiliantfb.c size = pci_resource_len(dp, 0); dp 554 drivers/video/fbdev/asiliantfb.c p = framebuffer_alloc(sizeof(u32) * 16, &dp->dev); dp 569 drivers/video/fbdev/asiliantfb.c pci_write_config_dword(dp, 4, 0x02800083); dp 580 drivers/video/fbdev/asiliantfb.c pci_set_drvdata(dp, p); dp 584 drivers/video/fbdev/asiliantfb.c static void asiliantfb_remove(struct pci_dev *dp) dp 586 drivers/video/fbdev/asiliantfb.c struct fb_info *p = pci_get_drvdata(dp); dp 591 drivers/video/fbdev/asiliantfb.c release_mem_region(pci_resource_start(dp, 0), pci_resource_len(dp, 0)); dp 2913 drivers/video/fbdev/aty/atyfb_base.c struct device_node *dp; dp 3031 drivers/video/fbdev/aty/atyfb_base.c dp = pci_device_to_OF_node(pdev); dp 3032 drivers/video/fbdev/aty/atyfb_base.c if (dp == of_console_device) { dp 3040 drivers/video/fbdev/aty/atyfb_base.c crtc.vxres = of_getintprop_default(dp, "width", 1024); dp 3041 drivers/video/fbdev/aty/atyfb_base.c crtc.vyres = of_getintprop_default(dp, "height", 768); dp 3042 drivers/video/fbdev/aty/atyfb_base.c var->bits_per_pixel = of_getintprop_default(dp, "depth", 8); dp 550 drivers/video/fbdev/aty/radeon_base.c struct device_node *dp = rinfo->of_node; dp 553 drivers/video/fbdev/aty/radeon_base.c if (dp == NULL) dp 555 drivers/video/fbdev/aty/radeon_base.c val = of_get_property(dp, "ATY,RefCLK", NULL); dp 563 drivers/video/fbdev/aty/radeon_base.c val = of_get_property(dp, "ATY,SCLK", NULL); dp 567 drivers/video/fbdev/aty/radeon_base.c val = of_get_property(dp, "ATY,MCLK", NULL); dp 66 drivers/video/fbdev/aty/radeon_monitor.c static int radeon_parse_montype_prop(struct device_node *dp, u8 **out_EDID, dp 77 drivers/video/fbdev/aty/radeon_monitor.c pmt = of_get_property(dp, "display-type", NULL); dp 96 drivers/video/fbdev/aty/radeon_monitor.c pedid = of_get_property(dp, propnames[i], NULL); dp 104 drivers/video/fbdev/aty/radeon_monitor.c if (pedid == NULL && dp->parent && (hdno != -1)) dp 105 drivers/video/fbdev/aty/radeon_monitor.c pedid = of_get_property(dp->parent, dp 107 drivers/video/fbdev/aty/radeon_monitor.c if (pedid == NULL && dp->parent && (hdno == 0)) dp 108 drivers/video/fbdev/aty/radeon_monitor.c pedid = of_get_property(dp->parent, "EDID", NULL); dp 122 drivers/video/fbdev/aty/radeon_monitor.c struct device_node *dp; dp 126 drivers/video/fbdev/aty/radeon_monitor.c dp = rinfo->of_node; dp 127 drivers/video/fbdev/aty/radeon_monitor.c while (dp == NULL) dp 134 drivers/video/fbdev/aty/radeon_monitor.c dp = dp->child; dp 136 drivers/video/fbdev/aty/radeon_monitor.c if (!dp) dp 138 drivers/video/fbdev/aty/radeon_monitor.c pname = of_get_property(dp, "name", NULL); dp 145 drivers/video/fbdev/aty/radeon_monitor.c int mt = radeon_parse_montype_prop(dp, out_EDID, 0); dp 153 drivers/video/fbdev/aty/radeon_monitor.c return radeon_parse_montype_prop(dp, out_EDID, 1); dp 155 drivers/video/fbdev/aty/radeon_monitor.c dp = dp->sibling; dp 160 drivers/video/fbdev/aty/radeon_monitor.c return radeon_parse_montype_prop(dp, out_EDID, -1); dp 278 drivers/video/fbdev/bw2.c struct device_node *dp = op->dev.of_node; dp 295 drivers/video/fbdev/bw2.c sbusfb_fill_var(&info->var, dp, 1); dp 296 drivers/video/fbdev/bw2.c linebytes = 
of_getintprop_default(dp, "linebytes", dp 309 drivers/video/fbdev/bw2.c if (!of_find_property(dp, "width", NULL)) { dp 338 drivers/video/fbdev/bw2.c dp, par->which_io, info->fix.smem_start); dp 357 drivers/video/fbdev/cg14.c struct device_node *dp) dp 359 drivers/video/fbdev/cg14.c snprintf(info->fix.id, sizeof(info->fix.id), "%pOFn", dp); dp 467 drivers/video/fbdev/cg14.c struct device_node *dp = op->dev.of_node; dp 481 drivers/video/fbdev/cg14.c sbusfb_fill_var(&info->var, dp, 8); dp 486 drivers/video/fbdev/cg14.c linebytes = of_getintprop_default(dp, "linebytes", dp 490 drivers/video/fbdev/cg14.c if (of_node_name_eq(dp->parent, "sbus") || dp 491 drivers/video/fbdev/cg14.c of_node_name_eq(dp->parent, "sbi")) { dp 547 drivers/video/fbdev/cg14.c cg14_init_fix(info, linebytes, dp); dp 556 drivers/video/fbdev/cg14.c dp, dp 248 drivers/video/fbdev/cg3.c struct device_node *dp) dp 250 drivers/video/fbdev/cg3.c snprintf(info->fix.id, sizeof(info->fix.id), "%pOFn", dp); dp 261 drivers/video/fbdev/cg3.c struct device_node *dp) dp 267 drivers/video/fbdev/cg3.c params = of_get_property(dp, "params", NULL); dp 352 drivers/video/fbdev/cg3.c struct device_node *dp = op->dev.of_node; dp 369 drivers/video/fbdev/cg3.c sbusfb_fill_var(&info->var, dp, 8); dp 373 drivers/video/fbdev/cg3.c if (of_node_name_eq(dp, "cgRDI")) dp 376 drivers/video/fbdev/cg3.c cg3_rdi_maybe_fixup_var(&info->var, dp); dp 378 drivers/video/fbdev/cg3.c linebytes = of_getintprop_default(dp, "linebytes", dp 396 drivers/video/fbdev/cg3.c if (!of_find_property(dp, "width", NULL)) { dp 408 drivers/video/fbdev/cg3.c cg3_init_fix(info, linebytes, dp); dp 417 drivers/video/fbdev/cg3.c dp, par->which_io, info->fix.smem_start); dp 743 drivers/video/fbdev/cg6.c struct device_node *dp = op->dev.of_node; dp 761 drivers/video/fbdev/cg6.c sbusfb_fill_var(&info->var, dp, 8); dp 766 drivers/video/fbdev/cg6.c linebytes = of_getintprop_default(dp, "linebytes", dp 770 drivers/video/fbdev/cg6.c dblbuf = of_getintprop_default(dp, "dblbuf", 0); dp 815 drivers/video/fbdev/cg6.c dp, info->fix.id, dp 39 drivers/video/fbdev/chipsfb.c #define write_ind(num, val, ap, dp) do { \ dp 40 drivers/video/fbdev/chipsfb.c outb((num), (ap)); outb((val), (dp)); \ dp 42 drivers/video/fbdev/chipsfb.c #define read_ind(num, var, ap, dp) do { \ dp 43 drivers/video/fbdev/chipsfb.c outb((num), (ap)); var = inb((dp)); \ dp 74 drivers/video/fbdev/chipsfb.c static int chipsfb_pci_init(struct pci_dev *dp, const struct pci_device_id *); dp 349 drivers/video/fbdev/chipsfb.c static int chipsfb_pci_init(struct pci_dev *dp, const struct pci_device_id *ent) dp 356 drivers/video/fbdev/chipsfb.c if (pci_enable_device(dp) < 0) { dp 357 drivers/video/fbdev/chipsfb.c dev_err(&dp->dev, "Cannot enable PCI device\n"); dp 361 drivers/video/fbdev/chipsfb.c if ((dp->resource[0].flags & IORESOURCE_MEM) == 0) dp 363 drivers/video/fbdev/chipsfb.c addr = pci_resource_start(dp, 0); dp 367 drivers/video/fbdev/chipsfb.c p = framebuffer_alloc(0, &dp->dev); dp 373 drivers/video/fbdev/chipsfb.c if (pci_request_region(dp, 0, "chipsfb") != 0) { dp 374 drivers/video/fbdev/chipsfb.c dev_err(&dp->dev, "Cannot request framebuffer\n"); dp 386 drivers/video/fbdev/chipsfb.c pci_read_config_word(dp, PCI_COMMAND, &cmd); dp 388 drivers/video/fbdev/chipsfb.c pci_write_config_word(dp, PCI_COMMAND, cmd); dp 406 drivers/video/fbdev/chipsfb.c dev_err(&dp->dev, "Cannot map framebuffer\n"); dp 411 drivers/video/fbdev/chipsfb.c pci_set_drvdata(dp, p); dp 416 drivers/video/fbdev/chipsfb.c dev_err(&dp->dev,"C&T 65550 framebuffer 
failed to register\n"); dp 420 drivers/video/fbdev/chipsfb.c dev_info(&dp->dev,"fb%d: Chips 65550 frame buffer" dp 429 drivers/video/fbdev/chipsfb.c pci_release_region(dp, 0); dp 437 drivers/video/fbdev/chipsfb.c static void chipsfb_remove(struct pci_dev *dp) dp 439 drivers/video/fbdev/chipsfb.c struct fb_info *p = pci_get_drvdata(dp); dp 446 drivers/video/fbdev/chipsfb.c pci_release_region(dp, 0); dp 140 drivers/video/fbdev/controlfb.c static int control_of_init(struct device_node *dp); dp 181 drivers/video/fbdev/controlfb.c struct device_node *dp; dp 184 drivers/video/fbdev/controlfb.c dp = of_find_node_by_name(NULL, "control"); dp 185 drivers/video/fbdev/controlfb.c if (dp && !control_of_init(dp)) dp 187 drivers/video/fbdev/controlfb.c of_node_put(dp); dp 574 drivers/video/fbdev/controlfb.c struct device_node *dp; dp 582 drivers/video/fbdev/controlfb.c dp = of_find_node_by_name(NULL, "control"); dp 583 drivers/video/fbdev/controlfb.c if (dp && !control_of_init(dp)) dp 585 drivers/video/fbdev/controlfb.c of_node_put(dp); dp 670 drivers/video/fbdev/controlfb.c static int __init control_of_init(struct device_node *dp) dp 680 drivers/video/fbdev/controlfb.c if (of_pci_address_to_resource(dp, 2, &fb_res) || dp 681 drivers/video/fbdev/controlfb.c of_pci_address_to_resource(dp, 1, ®_res)) { dp 899 drivers/video/fbdev/ffb.c struct device_node *dp = op->dev.of_node; dp 942 drivers/video/fbdev/ffb.c sbusfb_fill_var(&info->var, dp, 32); dp 948 drivers/video/fbdev/ffb.c if (of_node_name_eq(dp, "SUNW,afb")) dp 951 drivers/video/fbdev/ffb.c par->board_type = of_getintprop_default(dp, "board_type", 0); dp 1003 drivers/video/fbdev/ffb.c dp, dp 1471 drivers/video/fbdev/imsttfb.c struct device_node *dp; dp 1473 drivers/video/fbdev/imsttfb.c dp = pci_device_to_OF_node(pdev); dp 1474 drivers/video/fbdev/imsttfb.c if(dp) dp 1475 drivers/video/fbdev/imsttfb.c printk(KERN_INFO "%s: OF name %pOFn\n",__func__, dp); dp 1497 drivers/video/fbdev/imsttfb.c if (of_node_name_eq(dp, "IMS,tt128mb8") || dp 1498 drivers/video/fbdev/imsttfb.c of_node_name_eq(dp, "IMS,tt128mb8A")) dp 436 drivers/video/fbdev/leo.c leo_init_fix(struct fb_info *info, struct device_node *dp) dp 438 drivers/video/fbdev/leo.c snprintf(info->fix.id, sizeof(info->fix.id), "%pOFn", dp); dp 553 drivers/video/fbdev/leo.c struct device_node *dp = op->dev.of_node; dp 570 drivers/video/fbdev/leo.c sbusfb_fill_var(&info->var, dp, 32); dp 573 drivers/video/fbdev/leo.c linebytes = of_getintprop_default(dp, "linebytes", dp 615 drivers/video/fbdev/leo.c leo_init_fix(info, dp); dp 624 drivers/video/fbdev/leo.c dp, dp 31 drivers/video/fbdev/nvidia/nv_of.c struct device_node *parent, *dp; dp 45 drivers/video/fbdev/nvidia/nv_of.c for (dp = NULL; dp 46 drivers/video/fbdev/nvidia/nv_of.c (dp = of_get_next_child(parent, dp)) != NULL;) { dp 47 drivers/video/fbdev/nvidia/nv_of.c pname = of_get_property(dp, "name", NULL); dp 54 drivers/video/fbdev/nvidia/nv_of.c pedid = of_get_property(dp, dp 59 drivers/video/fbdev/nvidia/nv_of.c of_node_put(dp); dp 322 drivers/video/fbdev/offb.c static void offb_init_palette_hacks(struct fb_info *info, struct device_node *dp, dp 327 drivers/video/fbdev/offb.c if (of_node_name_prefix(dp, "ATY,Rage128")) { dp 328 drivers/video/fbdev/offb.c par->cmap_adr = offb_map_reg(dp, 2, 0, 0x1fff); dp 331 drivers/video/fbdev/offb.c } else if (of_node_name_prefix(dp, "ATY,RageM3pA") || dp 332 drivers/video/fbdev/offb.c of_node_name_prefix(dp, "ATY,RageM3p12A")) { dp 333 drivers/video/fbdev/offb.c par->cmap_adr = offb_map_reg(dp, 2, 0, 0x1fff); dp 
336 drivers/video/fbdev/offb.c } else if (of_node_name_prefix(dp, "ATY,RageM3pB")) { dp 337 drivers/video/fbdev/offb.c par->cmap_adr = offb_map_reg(dp, 2, 0, 0x1fff); dp 340 drivers/video/fbdev/offb.c } else if (of_node_name_prefix(dp, "ATY,Rage6")) { dp 341 drivers/video/fbdev/offb.c par->cmap_adr = offb_map_reg(dp, 1, 0, 0x1fff); dp 344 drivers/video/fbdev/offb.c } else if (of_node_name_prefix(dp, "ATY,")) { dp 350 drivers/video/fbdev/offb.c } else if (dp && (of_device_is_compatible(dp, "pci1014,b7") || dp 351 drivers/video/fbdev/offb.c of_device_is_compatible(dp, "pci1014,21c"))) { dp 352 drivers/video/fbdev/offb.c par->cmap_adr = offb_map_reg(dp, 0, 0x6000, 0x1000); dp 355 drivers/video/fbdev/offb.c } else if (of_node_name_prefix(dp, "vga,Display-")) { dp 357 drivers/video/fbdev/offb.c struct device_node *pciparent = of_get_parent(dp); dp 370 drivers/video/fbdev/offb.c } else if (dp && of_device_is_compatible(dp, "qemu,std-vga")) { dp 376 drivers/video/fbdev/offb.c u64 io_addr = of_translate_address(dp, io_of_addr); dp 392 drivers/video/fbdev/offb.c int foreign_endian, struct device_node *dp) dp 408 drivers/video/fbdev/offb.c printk(KERN_ERR "%pOF: can't use depth = %d\n", dp, depth); dp 429 drivers/video/fbdev/offb.c snprintf(fix->id, sizeof(fix->id), "OFfb %pOFn", dp); dp 443 drivers/video/fbdev/offb.c offb_init_palette_hacks(info, dp, address); dp 524 drivers/video/fbdev/offb.c fb_info(info, "Open Firmware frame buffer device on %pOF\n", dp); dp 538 drivers/video/fbdev/offb.c static void __init offb_init_nodriver(struct device_node *dp, int no_real_node) dp 550 drivers/video/fbdev/offb.c if (of_get_property(dp, "little-endian", NULL)) dp 553 drivers/video/fbdev/offb.c if (of_get_property(dp, "big-endian", NULL)) dp 557 drivers/video/fbdev/offb.c pp = of_get_property(dp, "linux,bootx-depth", &len); dp 559 drivers/video/fbdev/offb.c pp = of_get_property(dp, "depth", &len); dp 563 drivers/video/fbdev/offb.c pp = of_get_property(dp, "linux,bootx-width", &len); dp 565 drivers/video/fbdev/offb.c pp = of_get_property(dp, "width", &len); dp 569 drivers/video/fbdev/offb.c pp = of_get_property(dp, "linux,bootx-height", &len); dp 571 drivers/video/fbdev/offb.c pp = of_get_property(dp, "height", &len); dp 575 drivers/video/fbdev/offb.c pp = of_get_property(dp, "linux,bootx-linebytes", &len); dp 577 drivers/video/fbdev/offb.c pp = of_get_property(dp, "linebytes", &len); dp 595 drivers/video/fbdev/offb.c up = of_get_property(dp, "linux,bootx-addr", &len); dp 597 drivers/video/fbdev/offb.c up = of_get_property(dp, "address", &len); dp 605 drivers/video/fbdev/offb.c for (i = 0; (addrp = of_get_address(dp, i, &asize, &flags)) dp 613 drivers/video/fbdev/offb.c rstart = of_translate_address(dp, addrp); dp 640 drivers/video/fbdev/offb.c vidp = of_get_property(dp, "vendor-id", NULL); dp 641 drivers/video/fbdev/offb.c didp = of_get_property(dp, "device-id", NULL); dp 651 drivers/video/fbdev/offb.c if (of_node_name_eq(dp, "valkyrie")) dp 655 drivers/video/fbdev/offb.c foreign_endian, no_real_node ? 
NULL : dp); dp 661 drivers/video/fbdev/offb.c struct device_node *dp = NULL, *boot_disp = NULL; dp 677 drivers/video/fbdev/offb.c for_each_node_by_type(dp, "display") { dp 678 drivers/video/fbdev/offb.c if (of_get_property(dp, "linux,opened", NULL) && dp 679 drivers/video/fbdev/offb.c of_get_property(dp, "linux,boot-display", NULL)) { dp 680 drivers/video/fbdev/offb.c boot_disp = dp; dp 681 drivers/video/fbdev/offb.c offb_init_nodriver(dp, 0); dp 684 drivers/video/fbdev/offb.c for_each_node_by_type(dp, "display") { dp 685 drivers/video/fbdev/offb.c if (of_get_property(dp, "linux,opened", NULL) && dp 686 drivers/video/fbdev/offb.c dp != boot_disp) dp 687 drivers/video/fbdev/offb.c offb_init_nodriver(dp, 0); dp 626 drivers/video/fbdev/omap2/omapfb/dss/dss.c enum omap_display_type dp; dp 627 drivers/video/fbdev/omap2/omapfb/dss/dss.c dp = dss_feat_get_supported_displays(OMAP_DSS_CHANNEL_DIGIT); dp 630 drivers/video/fbdev/omap2/omapfb/dss/dss.c WARN_ON((src == DSS_VENC_TV_CLK) && !(dp & OMAP_DISPLAY_TYPE_VENC)); dp 631 drivers/video/fbdev/omap2/omapfb/dss/dss.c WARN_ON((src == DSS_HDMI_M_PCLK) && !(dp & OMAP_DISPLAY_TYPE_HDMI)); dp 634 drivers/video/fbdev/omap2/omapfb/dss/dss.c if ((dp & OMAP_DISPLAY_TYPE_VENC) && (dp & OMAP_DISPLAY_TYPE_HDMI)) dp 241 drivers/video/fbdev/p9100.c static void p9100_init_fix(struct fb_info *info, int linebytes, struct device_node *dp) dp 243 drivers/video/fbdev/p9100.c snprintf(info->fix.id, sizeof(info->fix.id), "%pOFn", dp); dp 255 drivers/video/fbdev/p9100.c struct device_node *dp = op->dev.of_node; dp 273 drivers/video/fbdev/p9100.c sbusfb_fill_var(&info->var, dp, 8); dp 278 drivers/video/fbdev/p9100.c linebytes = of_getintprop_default(dp, "linebytes", info->var.xres); dp 298 drivers/video/fbdev/p9100.c p9100_init_fix(info, linebytes, dp); dp 309 drivers/video/fbdev/p9100.c dp, dp 532 drivers/video/fbdev/platinumfb.c struct device_node *dp = odev->dev.of_node; dp 546 drivers/video/fbdev/platinumfb.c if (of_address_to_resource(dp, 0, &pinfo->rsrc_reg) || dp 547 drivers/video/fbdev/platinumfb.c of_address_to_resource(dp, 1, &pinfo->rsrc_fb)) { dp 1732 drivers/video/fbdev/riva/fbdev.c struct device_node *dp; dp 1740 drivers/video/fbdev/riva/fbdev.c dp = pci_device_to_OF_node(pd); dp 1741 drivers/video/fbdev/riva/fbdev.c for (; dp != NULL; dp = dp->child) { dp 1742 drivers/video/fbdev/riva/fbdev.c disptype = of_get_property(dp, "display-type", NULL); dp 1748 drivers/video/fbdev/riva/fbdev.c pedid = of_get_property(dp, propnames[i], NULL); dp 20 drivers/video/fbdev/sbuslib.c void sbusfb_fill_var(struct fb_var_screeninfo *var, struct device_node *dp, dp 25 drivers/video/fbdev/sbuslib.c var->xres = of_getintprop_default(dp, "width", 1152); dp 26 drivers/video/fbdev/sbuslib.c var->yres = of_getintprop_default(dp, "height", 900); dp 16 drivers/video/fbdev/sbuslib.h struct device_node *dp, int bpp); dp 117 drivers/video/fbdev/sunxvr1000.c struct device_node *dp = op->dev.of_node; dp 130 drivers/video/fbdev/sunxvr1000.c gp->of_node = dp; dp 153 drivers/video/fbdev/sunxvr1000.c printk("gfb: Found device at %pOF\n", dp); dp 158 drivers/video/fbdev/sunxvr1000.c dp); dp 368 drivers/video/fbdev/tcx.c struct device_node *dp = op->dev.of_node; dp 383 drivers/video/fbdev/tcx.c (of_find_property(dp, "tcx-8-bit", NULL) != NULL); dp 385 drivers/video/fbdev/tcx.c sbusfb_fill_var(&info->var, dp, 8); dp 390 drivers/video/fbdev/tcx.c linebytes = of_getintprop_default(dp, "linebytes", dp 472 drivers/video/fbdev/tcx.c dp, dp 330 drivers/video/fbdev/valkyriefb.c struct device_node *dp; 
dp 333 drivers/video/fbdev/valkyriefb.c dp = of_find_node_by_name(NULL, "valkyrie"); dp 334 drivers/video/fbdev/valkyriefb.c if (dp == 0) dp 337 drivers/video/fbdev/valkyriefb.c if (of_address_to_resource(dp, 0, &r)) { dp 95 drivers/xen/xen-acpi-processor.c set_xen_guest_handle(dst_cx->dp, NULL); dp 557 fs/afs/cell.c char keyname[4 + AFS_MAXCELLNAME + 1], *cp, *dp; dp 561 fs/afs/cell.c dp = keyname + 4; dp 564 fs/afs/cell.c *dp++ = tolower(*cp); dp 551 fs/binfmt_misc.c char *dp = page; dp 562 fs/binfmt_misc.c dp += sprintf(dp, "%s\ninterpreter %s\n", status, e->interpreter); dp 565 fs/binfmt_misc.c dp += sprintf(dp, "flags: "); dp 567 fs/binfmt_misc.c *dp++ = 'P'; dp 569 fs/binfmt_misc.c *dp++ = 'O'; dp 571 fs/binfmt_misc.c *dp++ = 'C'; dp 573 fs/binfmt_misc.c *dp++ = 'F'; dp 574 fs/binfmt_misc.c *dp++ = '\n'; dp 577 fs/binfmt_misc.c sprintf(dp, "extension .%s\n", e->magic); dp 579 fs/binfmt_misc.c dp += sprintf(dp, "offset %i\nmagic ", e->offset); dp 580 fs/binfmt_misc.c dp = bin2hex(dp, e->magic, e->size); dp 582 fs/binfmt_misc.c dp += sprintf(dp, "\nmask "); dp 583 fs/binfmt_misc.c dp = bin2hex(dp, e->mask, e->size); dp 585 fs/binfmt_misc.c *dp++ = '\n'; dp 586 fs/binfmt_misc.c *dp = '\0'; dp 104 fs/cifs/cifs_spnego.c char *description, *dp; dp 128 fs/cifs/cifs_spnego.c dp = description; dp 131 fs/cifs/cifs_spnego.c sprintf(dp, "ver=0x%x;host=%s;", CIFS_SPNEGO_UPCALL_VERSION, dp 133 fs/cifs/cifs_spnego.c dp = description + strlen(description); dp 137 fs/cifs/cifs_spnego.c sprintf(dp, "ip4=%pI4", &sa->sin_addr); dp 139 fs/cifs/cifs_spnego.c sprintf(dp, "ip6=%pI6", &sa6->sin6_addr); dp 143 fs/cifs/cifs_spnego.c dp = description + strlen(description); dp 147 fs/cifs/cifs_spnego.c sprintf(dp, ";sec=krb5"); dp 149 fs/cifs/cifs_spnego.c sprintf(dp, ";sec=mskrb5"); dp 152 fs/cifs/cifs_spnego.c sprintf(dp, ";sec=krb5"); dp 155 fs/cifs/cifs_spnego.c dp = description + strlen(description); dp 156 fs/cifs/cifs_spnego.c sprintf(dp, ";uid=0x%x", dp 159 fs/cifs/cifs_spnego.c dp = description + strlen(description); dp 160 fs/cifs/cifs_spnego.c sprintf(dp, ";creduid=0x%x", dp 164 fs/cifs/cifs_spnego.c dp = description + strlen(description); dp 165 fs/cifs/cifs_spnego.c sprintf(dp, ";user=%s", sesInfo->user_name); dp 168 fs/cifs/cifs_spnego.c dp = description + strlen(description); dp 169 fs/cifs/cifs_spnego.c sprintf(dp, ";pid=0x%x", current->pid); dp 411 fs/ext4/namei.c struct ext4_dir_entry *dp; dp 418 fs/ext4/namei.c dp = (struct ext4_dir_entry *)(((void *)dirent) + 12); dp 419 fs/ext4/namei.c if (le16_to_cpu(dp->rec_len) != dp 422 fs/ext4/namei.c root = (struct dx_root_info *)(((void *)dp + 12)); dp 1620 fs/f2fs/segment.c struct discard_policy dp; dp 1627 fs/f2fs/segment.c __init_discard_policy(sbi, &dp, DPOLICY_FSTRIM, 1); dp 1628 fs/f2fs/segment.c discard_blks = __wait_discard_cmd_range(sbi, &dp, 0, UINT_MAX); dp 1629 fs/f2fs/segment.c __init_discard_policy(sbi, &dp, DPOLICY_UMOUNT, 1); dp 1630 fs/f2fs/segment.c discard_blks += __wait_discard_cmd_range(sbi, &dp, 0, UINT_MAX); dp 81 fs/freevxfs/vxfs_lookup.c vxfs_find_entry(struct inode *ip, struct dentry *dp, struct page **ppp) dp 84 fs/freevxfs/vxfs_lookup.c const char *name = dp->d_name.name; dp 85 fs/freevxfs/vxfs_lookup.c int namelen = dp->d_name.len; dp 156 fs/freevxfs/vxfs_lookup.c vxfs_inode_by_name(struct inode *dip, struct dentry *dp) dp 162 fs/freevxfs/vxfs_lookup.c de = vxfs_find_entry(dip, dp, &pp); dp 187 fs/freevxfs/vxfs_lookup.c vxfs_lookup(struct inode *dip, struct dentry *dp, unsigned int flags) dp 192 fs/freevxfs/vxfs_lookup.c 
if (dp->d_name.len > VXFS_NAMELEN) dp 195 fs/freevxfs/vxfs_lookup.c ino = vxfs_inode_by_name(dip, dp); dp 198 fs/freevxfs/vxfs_lookup.c return d_splice_alias(ip, dp); dp 207 fs/freevxfs/vxfs_super.c static int vxfs_fill_super(struct super_block *sbp, void *dp, int silent) dp 64 fs/jfs/jfs_dmap.c static void dbAllocBits(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 73 fs/jfs/jfs_dmap.c static int dbAllocNext(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 75 fs/jfs/jfs_dmap.c static int dbAllocNear(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 78 fs/jfs/jfs_dmap.c static int dbAllocDmap(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 80 fs/jfs/jfs_dmap.c static int dbAllocDmapLev(struct bmap * bmp, struct dmap * dp, int nblocks, dp 91 fs/jfs/jfs_dmap.c static int dbFreeBits(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 93 fs/jfs/jfs_dmap.c static int dbFreeDmap(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 101 fs/jfs/jfs_dmap.c static int dbAllocDmapBU(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 103 fs/jfs/jfs_dmap.c static int dbInitDmap(struct dmap * dp, s64 blkno, int nblocks); dp 104 fs/jfs/jfs_dmap.c static int dbInitDmapTree(struct dmap * dp); dp 321 fs/jfs/jfs_dmap.c struct dmap *dp; dp 364 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 372 fs/jfs/jfs_dmap.c if ((rc = dbFreeDmap(bmp, dp, blkno, nb))) { dp 419 fs/jfs/jfs_dmap.c struct dmap *dp; dp 458 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 491 fs/jfs/jfs_dmap.c dp->pmap[word] &= dp 494 fs/jfs/jfs_dmap.c dp->pmap[word] |= dp 510 fs/jfs/jfs_dmap.c memset(&dp->pmap[word], 0, dp 513 fs/jfs/jfs_dmap.c memset(&dp->pmap[word], (int) ONES, dp 694 fs/jfs/jfs_dmap.c struct dmap *dp; dp 773 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 778 fs/jfs/jfs_dmap.c if ((rc = dbAllocNext(bmp, dp, blkno, (int) nblocks)) dp 805 fs/jfs/jfs_dmap.c dbAllocNear(bmp, dp, blkno, (int) nblocks, l2nb, results)) dp 817 fs/jfs/jfs_dmap.c if ((rc = dbAllocDmapLev(bmp, dp, (int) nblocks, l2nb, results)) dp 885 fs/jfs/jfs_dmap.c struct dmap *dp; dp 916 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 919 fs/jfs/jfs_dmap.c rc = dbAllocNext(bmp, dp, blkno, nblocks); dp 1019 fs/jfs/jfs_dmap.c struct dmap *dp; dp 1073 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 1078 fs/jfs/jfs_dmap.c rc = dbAllocNext(bmp, dp, extblkno, (int) addnblocks); dp 1112 fs/jfs/jfs_dmap.c static int dbAllocNext(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 1120 fs/jfs/jfs_dmap.c if (dp->tree.leafidx != cpu_to_le32(LEAFIND)) { dp 1127 fs/jfs/jfs_dmap.c leaf = dp->tree.stree + le32_to_cpu(dp->tree.leafidx); dp 1173 fs/jfs/jfs_dmap.c if ((mask & ~le32_to_cpu(dp->wmap[word])) != mask) dp 1212 fs/jfs/jfs_dmap.c return (dbAllocDmap(bmp, dp, blkno, nblocks)); dp 1245 fs/jfs/jfs_dmap.c struct dmap * dp, s64 blkno, int nblocks, int l2nb, s64 * results) dp 1250 fs/jfs/jfs_dmap.c if (dp->tree.leafidx != cpu_to_le32(LEAFIND)) { dp 1255 fs/jfs/jfs_dmap.c leaf = dp->tree.stree + le32_to_cpu(dp->tree.leafidx); dp 1275 fs/jfs/jfs_dmap.c blkno = le64_to_cpu(dp->start) + (word << L2DBWORD); dp 1284 fs/jfs/jfs_dmap.c dbFindBits(le32_to_cpu(dp->wmap[word]), l2nb); dp 1288 fs/jfs/jfs_dmap.c if ((rc = dbAllocDmap(bmp, dp, blkno, nblocks)) == 0) dp 1838 fs/jfs/jfs_dmap.c struct dmap *dp; dp 1849 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 1853 fs/jfs/jfs_dmap.c rc = dbAllocDmapLev(bmp, dp, (int) nblocks, l2nb, results); dp 1878 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 1882 fs/jfs/jfs_dmap.c if (dp->tree.stree[ROOT] != L2BPERDMAP) 
{ dp 1896 fs/jfs/jfs_dmap.c if ((rc = dbAllocDmap(bmp, dp, b, nb))) { dp 1935 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 1939 fs/jfs/jfs_dmap.c if (dbFreeDmap(bmp, dp, b, BPERDMAP)) { dp 1985 fs/jfs/jfs_dmap.c struct dmap * dp, int nblocks, int l2nb, s64 * results) dp 1997 fs/jfs/jfs_dmap.c if (dbFindLeaf((dmtree_t *) & dp->tree, l2nb, &leafidx)) dp 2003 fs/jfs/jfs_dmap.c blkno = le64_to_cpu(dp->start) + (leafidx << L2DBWORD); dp 2009 fs/jfs/jfs_dmap.c if (dp->tree.stree[leafidx + LEAFIND] < BUDMIN) dp 2010 fs/jfs/jfs_dmap.c blkno += dbFindBits(le32_to_cpu(dp->wmap[leafidx]), l2nb); dp 2013 fs/jfs/jfs_dmap.c if ((rc = dbAllocDmap(bmp, dp, blkno, nblocks)) == 0) dp 2047 fs/jfs/jfs_dmap.c static int dbAllocDmap(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 2056 fs/jfs/jfs_dmap.c oldroot = dp->tree.stree[ROOT]; dp 2059 fs/jfs/jfs_dmap.c dbAllocBits(bmp, dp, blkno, nblocks); dp 2062 fs/jfs/jfs_dmap.c if (dp->tree.stree[ROOT] == oldroot) dp 2069 fs/jfs/jfs_dmap.c if ((rc = dbAdjCtl(bmp, blkno, dp->tree.stree[ROOT], 1, 0))) dp 2070 fs/jfs/jfs_dmap.c dbFreeBits(bmp, dp, blkno, nblocks); dp 2102 fs/jfs/jfs_dmap.c static int dbFreeDmap(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 2111 fs/jfs/jfs_dmap.c oldroot = dp->tree.stree[ROOT]; dp 2114 fs/jfs/jfs_dmap.c rc = dbFreeBits(bmp, dp, blkno, nblocks); dp 2117 fs/jfs/jfs_dmap.c if (rc || (dp->tree.stree[ROOT] == oldroot)) dp 2124 fs/jfs/jfs_dmap.c if ((rc = dbAdjCtl(bmp, blkno, dp->tree.stree[ROOT], 0, 0))) { dp 2132 fs/jfs/jfs_dmap.c if (dp->tree.stree[word] == NOFREE) dp 2133 fs/jfs/jfs_dmap.c dbBackSplit((dmtree_t *) & dp->tree, word); dp 2135 fs/jfs/jfs_dmap.c dbAllocBits(bmp, dp, blkno, nblocks); dp 2164 fs/jfs/jfs_dmap.c static void dbAllocBits(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 2168 fs/jfs/jfs_dmap.c dmtree_t *tp = (dmtree_t *) & dp->tree; dp 2173 fs/jfs/jfs_dmap.c leaf = dp->tree.stree + LEAFIND; dp 2210 fs/jfs/jfs_dmap.c dp->wmap[word] |= cpu_to_le32(ONES << (DBWORD - nb) dp 2219 fs/jfs/jfs_dmap.c dbMaxBud((u8 *) & dp->wmap[word])); dp 2229 fs/jfs/jfs_dmap.c memset(&dp->wmap[word], (int) ONES, nwords * 4); dp 2269 fs/jfs/jfs_dmap.c le32_add_cpu(&dp->nfree, -nblocks); dp 2311 fs/jfs/jfs_dmap.c static int dbFreeBits(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 2315 fs/jfs/jfs_dmap.c dmtree_t *tp = (dmtree_t *) & dp->tree; dp 2359 fs/jfs/jfs_dmap.c dp->wmap[word] &= dp 2366 fs/jfs/jfs_dmap.c dbMaxBud((u8 *) & dp->wmap[word])); dp 2377 fs/jfs/jfs_dmap.c memset(&dp->wmap[word], 0, nwords * 4); dp 2413 fs/jfs/jfs_dmap.c le32_add_cpu(&dp->nfree, nblocks); dp 3198 fs/jfs/jfs_dmap.c struct dmap *dp; dp 3226 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 3234 fs/jfs/jfs_dmap.c if ((rc = dbAllocDmapBU(bmp, dp, blkno, nb))) { dp 3250 fs/jfs/jfs_dmap.c static int dbAllocDmapBU(struct bmap * bmp, struct dmap * dp, s64 blkno, dp 3256 fs/jfs/jfs_dmap.c struct dmaptree *tp = (struct dmaptree *) & dp->tree; dp 3298 fs/jfs/jfs_dmap.c dp->wmap[word] |= cpu_to_le32(ONES << (DBWORD - nb) dp 3309 fs/jfs/jfs_dmap.c memset(&dp->wmap[word], (int) ONES, nwords * 4); dp 3318 fs/jfs/jfs_dmap.c le32_add_cpu(&dp->nfree, -nblocks); dp 3321 fs/jfs/jfs_dmap.c dbInitDmapTree(dp); dp 3348 fs/jfs/jfs_dmap.c dbFreeBits(bmp, dp, blkno, nblocks); dp 3380 fs/jfs/jfs_dmap.c struct dmap *dp; dp 3557 fs/jfs/jfs_dmap.c dp = (struct dmap *) mp->data; dp 3558 fs/jfs/jfs_dmap.c *l0leaf = dbInitDmap(dp, blkno, n); dp 3561 fs/jfs/jfs_dmap.c agno = le64_to_cpu(dp->start) >> l2agsize; dp 3734 fs/jfs/jfs_dmap.c static int dbInitDmap(struct dmap * 
dp, s64 Blkno, int nblocks) dp 3742 fs/jfs/jfs_dmap.c dp->nblocks = dp->nfree = cpu_to_le32(nblocks); dp 3743 fs/jfs/jfs_dmap.c dp->start = cpu_to_le64(Blkno); dp 3746 fs/jfs/jfs_dmap.c memset(&dp->wmap[0], 0, LPERDMAP * 4); dp 3747 fs/jfs/jfs_dmap.c memset(&dp->pmap[0], 0, LPERDMAP * 4); dp 3751 fs/jfs/jfs_dmap.c le32_add_cpu(&dp->nblocks, nblocks); dp 3752 fs/jfs/jfs_dmap.c le32_add_cpu(&dp->nfree, nblocks); dp 3772 fs/jfs/jfs_dmap.c dp->wmap[w] &= cpu_to_le32(~(ONES << (DBWORD - nb) dp 3774 fs/jfs/jfs_dmap.c dp->pmap[w] &= cpu_to_le32(~(ONES << (DBWORD - nb) dp 3782 fs/jfs/jfs_dmap.c memset(&dp->wmap[w], 0, nw * 4); dp 3783 fs/jfs/jfs_dmap.c memset(&dp->pmap[w], 0, nw * 4); dp 3806 fs/jfs/jfs_dmap.c dp->wmap[w] = dp->pmap[w] = cpu_to_le32(ONES >> b); dp 3812 fs/jfs/jfs_dmap.c dp->pmap[i] = dp->wmap[i] = cpu_to_le32(ONES); dp 3818 fs/jfs/jfs_dmap.c return (dbInitDmapTree(dp)); dp 3836 fs/jfs/jfs_dmap.c static int dbInitDmapTree(struct dmap * dp) dp 3843 fs/jfs/jfs_dmap.c tp = &dp->tree; dp 3856 fs/jfs/jfs_dmap.c *cp++ = dbMaxBud((u8 *) & dp->wmap[i]); dp 175 fs/jfs/jfs_dtree.c static void dtMoveEntry(dtpage_t * sp, int si, dtpage_t * dp, dp 4051 fs/jfs/jfs_dtree.c static void dtMoveEntry(dtpage_t * sp, int si, dtpage_t * dp, dp 4069 fs/jfs/jfs_dtree.c dstbl = (s8 *) & dp->slot[dp->header.stblindex]; dp 4071 fs/jfs/jfs_dtree.c dsi = dp->header.freelist; /* first (whole page) free slot */ dp 4113 fs/jfs/jfs_dtree.c h = d = &dp->slot[dsi]; dp 4211 fs/jfs/jfs_dtree.c if (dp->header.flag & BT_LEAF) dp 4234 fs/jfs/jfs_dtree.c dp->header.nextindex = di; dp 4236 fs/jfs/jfs_dtree.c dp->header.freelist = dsi; dp 4237 fs/jfs/jfs_dtree.c dp->header.freecnt -= nd; dp 296 fs/jfs/jfs_imap.c struct dinode *dp; dp 372 fs/jfs/jfs_imap.c dp = (struct dinode *) mp->data; dp 373 fs/jfs/jfs_imap.c dp += rel_inode; dp 375 fs/jfs/jfs_imap.c if (ip->i_ino != le32_to_cpu(dp->di_number)) { dp 378 fs/jfs/jfs_imap.c } else if (le32_to_cpu(dp->di_nlink) == 0) dp 382 fs/jfs/jfs_imap.c rc = copy_from_dinode(dp, ip); dp 420 fs/jfs/jfs_imap.c struct dinode *dp; dp 453 fs/jfs/jfs_imap.c dp = (struct dinode *) (mp->data); dp 454 fs/jfs/jfs_imap.c dp += inum % 8; /* 8 inodes per 4K page */ dp 457 fs/jfs/jfs_imap.c if ((copy_from_dinode(dp, ip)) != 0) { dp 474 fs/jfs/jfs_imap.c sbi->gengen = le32_to_cpu(dp->di_gengen); dp 475 fs/jfs/jfs_imap.c sbi->inostamp = le32_to_cpu(dp->di_inostamp); dp 502 fs/jfs/jfs_imap.c struct dinode *dp; dp 523 fs/jfs/jfs_imap.c dp = (struct dinode *) (mp->data); dp 524 fs/jfs/jfs_imap.c dp += inum % 8; /* 8 inodes per 4K page */ dp 527 fs/jfs/jfs_imap.c copy_to_dinode(dp, ip); dp 528 fs/jfs/jfs_imap.c memcpy(&dp->di_xtroot, &JFS_IP(ip)->i_xtroot, 288); dp 531 fs/jfs/jfs_imap.c dp->di_gengen = cpu_to_le32(sbi->gengen); dp 585 fs/jfs/jfs_imap.c struct dinode *dp; dp 642 fs/jfs/jfs_imap.c dp = (struct dinode *) mp->data; dp 643 fs/jfs/jfs_imap.c dp += rel_inode; dp 688 fs/jfs/jfs_imap.c xp = (xtpage_t *) &dp->di_dirtable; dp 724 fs/jfs/jfs_imap.c xp = &dp->di_xtroot; dp 748 fs/jfs/jfs_imap.c xp = (dtpage_t *) & dp->di_dtroot; dp 766 fs/jfs/jfs_imap.c memcpy(&dp->di_fastsymlink, jfs_ip->i_inline, IDATASIZE); dp 777 fs/jfs/jfs_imap.c memcpy(&dp->di_inlineea, jfs_ip->i_inline_ea, INODESLOTSIZE); dp 788 fs/jfs/jfs_imap.c copy_to_dinode(dp, ip); dp 791 fs/jfs/jfs_imap.c memcpy(&dp->di_dirtable, &jfs_ip->i_dirtable, 96); dp 2160 fs/jfs/jfs_imap.c struct dinode *dp; dp 2272 fs/jfs/jfs_imap.c dp = (struct dinode *) dmp->data; dp 2277 fs/jfs/jfs_imap.c for (j = 0; j < INOSPERPAGE; j++, dp++, ino++) { dp 2278 
fs/jfs/jfs_imap.c dp->di_inostamp = cpu_to_le32(sbi->inostamp); dp 2279 fs/jfs/jfs_imap.c dp->di_number = cpu_to_le32(ino); dp 2280 fs/jfs/jfs_imap.c dp->di_fileset = cpu_to_le32(FILESYSTEM_I); dp 2281 fs/jfs/jfs_imap.c dp->di_mode = 0; dp 2282 fs/jfs/jfs_imap.c dp->di_nlink = 0; dp 2283 fs/jfs/jfs_imap.c PXDaddress(&(dp->di_ixpxd), blkno); dp 2284 fs/jfs/jfs_imap.c PXDlength(&(dp->di_ixpxd), imap->im_nbperiext); dp 314 fs/nfsd/nfs4callback.c const struct nfs4_delegation *dp, dp 320 fs/nfsd/nfs4callback.c encode_stateid4(xdr, &dp->dl_stid.sc_stateid); dp 325 fs/nfsd/nfs4callback.c encode_nfs_fh4(xdr, &dp->dl_stid.sc_file->fi_fhandle); dp 472 fs/nfsd/nfs4callback.c const struct nfs4_delegation *dp = cb_to_delegation(cb); dp 480 fs/nfsd/nfs4callback.c encode_cb_recall4args(xdr, dp, &hdr); dp 871 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 880 fs/nfsd/nfs4state.c dp = delegstateid(nfs4_alloc_stid(clp, deleg_slab, nfs4_free_deleg)); dp 881 fs/nfsd/nfs4state.c if (dp == NULL) dp 889 fs/nfsd/nfs4state.c dp->dl_stid.sc_stateid.si_generation = 1; dp 890 fs/nfsd/nfs4state.c INIT_LIST_HEAD(&dp->dl_perfile); dp 891 fs/nfsd/nfs4state.c INIT_LIST_HEAD(&dp->dl_perclnt); dp 892 fs/nfsd/nfs4state.c INIT_LIST_HEAD(&dp->dl_recall_lru); dp 893 fs/nfsd/nfs4state.c dp->dl_clnt_odstate = odstate; dp 895 fs/nfsd/nfs4state.c dp->dl_type = NFS4_OPEN_DELEGATE_READ; dp 896 fs/nfsd/nfs4state.c dp->dl_retries = 1; dp 897 fs/nfsd/nfs4state.c nfsd4_init_cb(&dp->dl_recall, dp->dl_stid.sc_client, dp 900 fs/nfsd/nfs4state.c dp->dl_stid.sc_file = fp; dp 901 fs/nfsd/nfs4state.c return dp; dp 951 fs/nfsd/nfs4state.c static void nfs4_unlock_deleg_lease(struct nfs4_delegation *dp) dp 953 fs/nfsd/nfs4state.c struct nfs4_file *fp = dp->dl_stid.sc_file; dp 958 fs/nfsd/nfs4state.c vfs_setlease(nf->nf_file, F_UNLCK, NULL, (void **)&dp); dp 962 fs/nfsd/nfs4state.c static void destroy_unhashed_deleg(struct nfs4_delegation *dp) dp 964 fs/nfsd/nfs4state.c put_clnt_odstate(dp->dl_clnt_odstate); dp 965 fs/nfsd/nfs4state.c nfs4_unlock_deleg_lease(dp); dp 966 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 1015 fs/nfsd/nfs4state.c hash_delegation_locked(struct nfs4_delegation *dp, struct nfs4_file *fp) dp 1017 fs/nfsd/nfs4state.c struct nfs4_client *clp = dp->dl_stid.sc_client; dp 1024 fs/nfsd/nfs4state.c refcount_inc(&dp->dl_stid.sc_count); dp 1025 fs/nfsd/nfs4state.c dp->dl_stid.sc_type = NFS4_DELEG_STID; dp 1026 fs/nfsd/nfs4state.c list_add(&dp->dl_perfile, &fp->fi_delegations); dp 1027 fs/nfsd/nfs4state.c list_add(&dp->dl_perclnt, &clp->cl_delegations); dp 1032 fs/nfsd/nfs4state.c unhash_delegation_locked(struct nfs4_delegation *dp) dp 1034 fs/nfsd/nfs4state.c struct nfs4_file *fp = dp->dl_stid.sc_file; dp 1038 fs/nfsd/nfs4state.c if (list_empty(&dp->dl_perfile)) dp 1041 fs/nfsd/nfs4state.c dp->dl_stid.sc_type = NFS4_CLOSED_DELEG_STID; dp 1043 fs/nfsd/nfs4state.c ++dp->dl_time; dp 1045 fs/nfsd/nfs4state.c list_del_init(&dp->dl_perclnt); dp 1046 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 1047 fs/nfsd/nfs4state.c list_del_init(&dp->dl_perfile); dp 1052 fs/nfsd/nfs4state.c static void destroy_delegation(struct nfs4_delegation *dp) dp 1057 fs/nfsd/nfs4state.c unhashed = unhash_delegation_locked(dp); dp 1060 fs/nfsd/nfs4state.c destroy_unhashed_deleg(dp); dp 1063 fs/nfsd/nfs4state.c static void revoke_delegation(struct nfs4_delegation *dp) dp 1065 fs/nfsd/nfs4state.c struct nfs4_client *clp = dp->dl_stid.sc_client; dp 1067 fs/nfsd/nfs4state.c WARN_ON(!list_empty(&dp->dl_recall_lru)); dp 1070 fs/nfsd/nfs4state.c 
dp->dl_stid.sc_type = NFS4_REVOKED_DELEG_STID; dp 1071 fs/nfsd/nfs4state.c refcount_inc(&dp->dl_stid.sc_count); dp 1073 fs/nfsd/nfs4state.c list_add(&dp->dl_recall_lru, &clp->cl_revoked); dp 1076 fs/nfsd/nfs4state.c destroy_unhashed_deleg(dp); dp 2007 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 2013 fs/nfsd/nfs4state.c dp = list_entry(clp->cl_delegations.next, struct nfs4_delegation, dl_perclnt); dp 2014 fs/nfsd/nfs4state.c WARN_ON(!unhash_delegation_locked(dp)); dp 2015 fs/nfsd/nfs4state.c list_add(&dp->dl_recall_lru, &reaplist); dp 2019 fs/nfsd/nfs4state.c dp = list_entry(reaplist.next, struct nfs4_delegation, dl_recall_lru); dp 2020 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 2021 fs/nfsd/nfs4state.c destroy_unhashed_deleg(dp); dp 2024 fs/nfsd/nfs4state.c dp = list_entry(clp->cl_revoked.next, struct nfs4_delegation, dl_recall_lru); dp 2025 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 2026 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 4368 fs/nfsd/nfs4state.c struct nfs4_delegation *dp = cb_to_delegation(cb); dp 4369 fs/nfsd/nfs4state.c struct nfsd_net *nn = net_generic(dp->dl_stid.sc_client->net, dp 4372 fs/nfsd/nfs4state.c block_delegations(&dp->dl_stid.sc_file->fi_fhandle); dp 4382 fs/nfsd/nfs4state.c if (dp->dl_time == 0) { dp 4383 fs/nfsd/nfs4state.c dp->dl_time = get_seconds(); dp 4384 fs/nfsd/nfs4state.c list_add_tail(&dp->dl_recall_lru, &nn->del_recall_lru); dp 4392 fs/nfsd/nfs4state.c struct nfs4_delegation *dp = cb_to_delegation(cb); dp 4394 fs/nfsd/nfs4state.c if (dp->dl_stid.sc_type == NFS4_CLOSED_DELEG_STID) dp 4409 fs/nfsd/nfs4state.c if (dp->dl_retries--) { dp 4421 fs/nfsd/nfs4state.c struct nfs4_delegation *dp = cb_to_delegation(cb); dp 4423 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 4432 fs/nfsd/nfs4state.c static void nfsd_break_one_deleg(struct nfs4_delegation *dp) dp 4441 fs/nfsd/nfs4state.c refcount_inc(&dp->dl_stid.sc_count); dp 4442 fs/nfsd/nfs4state.c nfsd4_run_cb(&dp->dl_recall); dp 4450 fs/nfsd/nfs4state.c struct nfs4_delegation *dp = (struct nfs4_delegation *)fl->fl_owner; dp 4451 fs/nfsd/nfs4state.c struct nfs4_file *fp = dp->dl_stid.sc_file; dp 4462 fs/nfsd/nfs4state.c nfsd_break_one_deleg(dp); dp 4591 fs/nfsd/nfs4state.c nfs4_check_delegmode(struct nfs4_delegation *dp, int flags) dp 4593 fs/nfsd/nfs4state.c if ((flags & WR_STATE) && (dp->dl_type == NFS4_OPEN_DELEGATE_READ)) dp 4623 fs/nfsd/nfs4state.c struct nfs4_delegation **dp) dp 4644 fs/nfsd/nfs4state.c *dp = deleg; dp 4786 fs/nfsd/nfs4state.c static struct file_lock *nfs4_alloc_init_lease(struct nfs4_delegation *dp, dp 4798 fs/nfsd/nfs4state.c fl->fl_owner = (fl_owner_t)dp; dp 4800 fs/nfsd/nfs4state.c fl->fl_file = dp->dl_stid.sc_file->fi_deleg_file->nf_file; dp 4809 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 4847 fs/nfsd/nfs4state.c dp = alloc_init_deleg(clp, fp, fh, odstate); dp 4848 fs/nfsd/nfs4state.c if (!dp) dp 4851 fs/nfsd/nfs4state.c fl = nfs4_alloc_init_lease(dp, NFS4_OPEN_DELEGATE_READ); dp 4866 fs/nfsd/nfs4state.c status = hash_delegation_locked(dp, fp); dp 4873 fs/nfsd/nfs4state.c return dp; dp 4875 fs/nfsd/nfs4state.c vfs_setlease(fp->fi_deleg_file->nf_file, F_UNLCK, NULL, (void **)&dp); dp 4877 fs/nfsd/nfs4state.c put_clnt_odstate(dp->dl_clnt_odstate); dp 4878 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 4915 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 4956 fs/nfsd/nfs4state.c dp = nfs4_set_delegation(clp, fh, stp->st_stid.sc_file, stp->st_clnt_odstate); dp 4957 fs/nfsd/nfs4state.c if (IS_ERR(dp)) dp 4960 
fs/nfsd/nfs4state.c memcpy(&open->op_delegate_stateid, &dp->dl_stid.sc_stateid, sizeof(dp->dl_stid.sc_stateid)); dp 4963 fs/nfsd/nfs4state.c STATEID_VAL(&dp->dl_stid.sc_stateid)); dp 4965 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 4982 fs/nfsd/nfs4state.c struct nfs4_delegation *dp) dp 4985 fs/nfsd/nfs4state.c dp->dl_type == NFS4_OPEN_DELEGATE_WRITE) { dp 4989 fs/nfsd/nfs4state.c dp->dl_type == NFS4_OPEN_DELEGATE_WRITE) { dp 5006 fs/nfsd/nfs4state.c struct nfs4_delegation *dp = NULL; dp 5017 fs/nfsd/nfs4state.c status = nfs4_check_deleg(cl, open, &dp); dp 5085 fs/nfsd/nfs4state.c if (open->op_delegate_type == NFS4_OPEN_DELEGATE_NONE && dp && dp 5087 fs/nfsd/nfs4state.c nfsd4_deleg_xgrade_none_ext(open, dp); dp 5102 fs/nfsd/nfs4state.c if (dp) dp 5103 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 5214 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 5255 fs/nfsd/nfs4state.c dp = list_entry (pos, struct nfs4_delegation, dl_recall_lru); dp 5256 fs/nfsd/nfs4state.c if (time_after((unsigned long)dp->dl_time, (unsigned long)cutoff)) { dp 5257 fs/nfsd/nfs4state.c t = dp->dl_time - cutoff; dp 5261 fs/nfsd/nfs4state.c WARN_ON(!unhash_delegation_locked(dp)); dp 5262 fs/nfsd/nfs4state.c list_add(&dp->dl_recall_lru, &reaplist); dp 5266 fs/nfsd/nfs4state.c dp = list_first_entry(&reaplist, struct nfs4_delegation, dp 5268 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 5269 fs/nfsd/nfs4state.c revoke_delegation(dp); dp 5720 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 5747 fs/nfsd/nfs4state.c dp = delegstateid(s); dp 5748 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 6033 fs/nfsd/nfs4state.c struct nfs4_delegation *dp; dp 6045 fs/nfsd/nfs4state.c dp = delegstateid(s); dp 6046 fs/nfsd/nfs4state.c status = nfsd4_stid_check_stateid_generation(stateid, &dp->dl_stid, nfsd4_has_session(cstate)); dp 6050 fs/nfsd/nfs4state.c destroy_delegation(dp); dp 6052 fs/nfsd/nfs4state.c nfs4_put_stid(&dp->dl_stid); dp 7400 fs/nfsd/nfs4state.c struct nfs4_delegation *dp, *next; dp 7408 fs/nfsd/nfs4state.c list_for_each_entry_safe(dp, next, &clp->cl_delegations, dl_perclnt) { dp 7416 fs/nfsd/nfs4state.c if (dp->dl_time != 0) dp 7420 fs/nfsd/nfs4state.c WARN_ON(!unhash_delegation_locked(dp)); dp 7421 fs/nfsd/nfs4state.c list_add(&dp->dl_recall_lru, victims); dp 7470 fs/nfsd/nfs4state.c struct nfs4_delegation *dp, *next; dp 7472 fs/nfsd/nfs4state.c list_for_each_entry_safe(dp, next, reaplist, dl_recall_lru) { dp 7473 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 7474 fs/nfsd/nfs4state.c clp = dp->dl_stid.sc_client; dp 7475 fs/nfsd/nfs4state.c revoke_delegation(dp); dp 7530 fs/nfsd/nfs4state.c struct nfs4_delegation *dp, *next; dp 7532 fs/nfsd/nfs4state.c list_for_each_entry_safe(dp, next, reaplist, dl_recall_lru) { dp 7533 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 7534 fs/nfsd/nfs4state.c clp = dp->dl_stid.sc_client; dp 7542 fs/nfsd/nfs4state.c dp->dl_time = 0; dp 7544 fs/nfsd/nfs4state.c nfsd_break_one_deleg(dp); dp 7756 fs/nfsd/nfs4state.c struct nfs4_delegation *dp = NULL; dp 7766 fs/nfsd/nfs4state.c dp = list_entry (pos, struct nfs4_delegation, dl_recall_lru); dp 7767 fs/nfsd/nfs4state.c WARN_ON(!unhash_delegation_locked(dp)); dp 7768 fs/nfsd/nfs4state.c list_add(&dp->dl_recall_lru, &reaplist); dp 7772 fs/nfsd/nfs4state.c dp = list_entry (pos, struct nfs4_delegation, dl_recall_lru); dp 7773 fs/nfsd/nfs4state.c list_del_init(&dp->dl_recall_lru); dp 7774 fs/nfsd/nfs4state.c destroy_unhashed_deleg(dp); dp 647 fs/nfsd/state.h extern void 
nfsd4_prepare_cb_recall(struct nfs4_delegation *dp); dp 115 fs/nfsd/vfs.c struct dentry *dp; dp 119 fs/nfsd/vfs.c dp = dget_parent(path->dentry); dp 121 fs/nfsd/vfs.c path->dentry = dp; dp 168 fs/ntfs/compress.c struct page *dp; /* Current destination page being worked on. */ dp 210 fs/ntfs/compress.c dp = dest_pages[di]; dp 215 fs/ntfs/compress.c handle_bounds_compressed_page(dp, i_size, dp 217 fs/ntfs/compress.c flush_dcache_page(dp); dp 218 fs/ntfs/compress.c kunmap(dp); dp 219 fs/ntfs/compress.c SetPageUptodate(dp); dp 220 fs/ntfs/compress.c unlock_page(dp); dp 224 fs/ntfs/compress.c put_page(dp); dp 251 fs/ntfs/compress.c dp = dest_pages[*dest_index]; dp 252 fs/ntfs/compress.c if (!dp) { dp 264 fs/ntfs/compress.c dp_addr = (u8*)page_address(dp) + do_sb_start; dp 71 fs/ocfs2/quota_global.c static void ocfs2_global_disk2memdqb(struct dquot *dquot, void *dp) dp 73 fs/ocfs2/quota_global.c struct ocfs2_global_disk_dqblk *d = dp; dp 96 fs/ocfs2/quota_global.c static void ocfs2_global_mem2diskdqb(void *dp, struct dquot *dquot) dp 98 fs/ocfs2/quota_global.c struct ocfs2_global_disk_dqblk *d = dp; dp 114 fs/ocfs2/quota_global.c static int ocfs2_global_is_id(void *dp, struct dquot *dquot) dp 116 fs/ocfs2/quota_global.c struct ocfs2_global_disk_dqblk *d = dp; dp 120 fs/ocfs2/quota_global.c if (qtree_entry_unused(&oinfo->dqi_gi, dp)) dp 184 fs/openpromfs/inode.c struct device_node *dp, *child; dp 195 fs/openpromfs/inode.c dp = oi->u.node; dp 202 fs/openpromfs/inode.c child = dp->child; dp 217 fs/openpromfs/inode.c prop = dp->properties; dp 251 fs/openpromfs/inode.c if (of_node_name_eq(dp, "options") && (len == 17) && dp 269 fs/openpromfs/inode.c struct device_node *dp = oi->u.node; dp 283 fs/openpromfs/inode.c (dp->parent == NULL ? dp 285 fs/openpromfs/inode.c dp->parent->unique_id), DT_DIR)) dp 292 fs/openpromfs/inode.c child = dp->child; dp 309 fs/openpromfs/inode.c prop = dp->properties; dp 352 fs/proc/generic.c struct proc_dir_entry *dp) dp 354 fs/proc/generic.c if (proc_alloc_inum(&dp->low_ino)) dp 358 fs/proc/generic.c dp->parent = dir; dp 359 fs/proc/generic.c if (pde_subdir_insert(dir, dp) == false) { dp 361 fs/proc/generic.c dir->name, dp->name); dp 367 fs/proc/generic.c return dp; dp 369 fs/proc/generic.c proc_free_inum(dp->low_ino); dp 371 fs/proc/generic.c pde_free(dp); dp 177 fs/proc/internal.h struct proc_dir_entry *dp); dp 27 fs/quota/quota_v2.c static void v2r0_mem2diskdqb(void *dp, struct dquot *dquot); dp 28 fs/quota/quota_v2.c static void v2r0_disk2memdqb(struct dquot *dquot, void *dp); dp 29 fs/quota/quota_v2.c static int v2r0_is_id(void *dp, struct dquot *dquot); dp 30 fs/quota/quota_v2.c static void v2r1_mem2diskdqb(void *dp, struct dquot *dquot); dp 31 fs/quota/quota_v2.c static void v2r1_disk2memdqb(struct dquot *dquot, void *dp); dp 32 fs/quota/quota_v2.c static int v2r1_is_id(void *dp, struct dquot *dquot); dp 198 fs/quota/quota_v2.c static void v2r0_disk2memdqb(struct dquot *dquot, void *dp) dp 200 fs/quota/quota_v2.c struct v2r0_disk_dqblk *d = dp, empty; dp 214 fs/quota/quota_v2.c if (!memcmp(&empty, dp, sizeof(struct v2r0_disk_dqblk))) dp 218 fs/quota/quota_v2.c static void v2r0_mem2diskdqb(void *dp, struct dquot *dquot) dp 220 fs/quota/quota_v2.c struct v2r0_disk_dqblk *d = dp; dp 234 fs/quota/quota_v2.c if (qtree_entry_unused(info, dp)) dp 238 fs/quota/quota_v2.c static int v2r0_is_id(void *dp, struct dquot *dquot) dp 240 fs/quota/quota_v2.c struct v2r0_disk_dqblk *d = dp; dp 244 fs/quota/quota_v2.c if (qtree_entry_unused(info, dp)) dp 251 fs/quota/quota_v2.c 
static void v2r1_disk2memdqb(struct dquot *dquot, void *dp) dp 253 fs/quota/quota_v2.c struct v2r1_disk_dqblk *d = dp, empty; dp 267 fs/quota/quota_v2.c if (!memcmp(&empty, dp, sizeof(struct v2r1_disk_dqblk))) dp 271 fs/quota/quota_v2.c static void v2r1_mem2diskdqb(void *dp, struct dquot *dquot) dp 273 fs/quota/quota_v2.c struct v2r1_disk_dqblk *d = dp; dp 287 fs/quota/quota_v2.c if (qtree_entry_unused(info, dp)) dp 291 fs/quota/quota_v2.c static int v2r1_is_id(void *dp, struct dquot *dquot) dp 293 fs/quota/quota_v2.c struct v2r1_disk_dqblk *d = dp; dp 297 fs/quota/quota_v2.c if (qtree_entry_unused(info, dp)) dp 461 fs/xfs/libxfs/xfs_ag.c struct xfs_aghdr_grow_data *dp; dp 466 fs/xfs/libxfs/xfs_ag.c for (dp = &aghdr_data[0]; dp->daddr != XFS_BUF_DADDR_NULL; dp++) { dp 467 fs/xfs/libxfs/xfs_ag.c if (!dp->need_init) dp 470 fs/xfs/libxfs/xfs_ag.c id->daddr = dp->daddr; dp 471 fs/xfs/libxfs/xfs_ag.c id->numblks = dp->numblks; dp 472 fs/xfs/libxfs/xfs_ag.c id->type = dp->type; dp 473 fs/xfs/libxfs/xfs_ag.c error = xfs_ag_init_hdr(mp, id, dp->work, dp->ops); dp 63 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp, dp 72 fs/xfs/libxfs/xfs_attr.c args->geo = dp->i_mount->m_attr_geo; dp 74 fs/xfs/libxfs/xfs_attr.c args->dp = dp; dp 194 fs/xfs/libxfs/xfs_attr.c struct xfs_mount *mp = args->dp->i_mount; dp 224 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp, dp 228 fs/xfs/libxfs/xfs_attr.c struct xfs_mount *mp = dp->i_mount; dp 240 fs/xfs/libxfs/xfs_attr.c xfs_trans_ichgtime(args->trans, dp, XFS_ICHGTIME_CHG); dp 257 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp = args->dp; dp 265 fs/xfs/libxfs/xfs_attr.c if (dp->i_d.di_aformat == XFS_DINODE_FMT_LOCAL || dp 266 fs/xfs/libxfs/xfs_attr.c (dp->i_d.di_aformat == XFS_DINODE_FMT_EXTENTS && dp 267 fs/xfs/libxfs/xfs_attr.c dp->i_d.di_anextents == 0)) { dp 272 fs/xfs/libxfs/xfs_attr.c if (dp->i_d.di_aformat == XFS_DINODE_FMT_EXTENTS) dp 278 fs/xfs/libxfs/xfs_attr.c error = xfs_attr_try_sf_addname(dp, args); dp 306 fs/xfs/libxfs/xfs_attr.c if (xfs_bmap_one_block(dp, XFS_ATTR_FORK)) dp 320 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp = args->dp; dp 323 fs/xfs/libxfs/xfs_attr.c if (!xfs_inode_hasattr(dp)) { dp 325 fs/xfs/libxfs/xfs_attr.c } else if (dp->i_d.di_aformat == XFS_DINODE_FMT_LOCAL) { dp 326 fs/xfs/libxfs/xfs_attr.c ASSERT(dp->i_afp->if_flags & XFS_IFINLINE); dp 328 fs/xfs/libxfs/xfs_attr.c } else if (xfs_bmap_one_block(dp, XFS_ATTR_FORK)) { dp 339 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp, dp 345 fs/xfs/libxfs/xfs_attr.c struct xfs_mount *mp = dp->i_mount; dp 353 fs/xfs/libxfs/xfs_attr.c if (XFS_FORCED_SHUTDOWN(dp->i_mount)) dp 356 fs/xfs/libxfs/xfs_attr.c error = xfs_attr_args_init(&args, dp, name, flags); dp 365 fs/xfs/libxfs/xfs_attr.c error = xfs_qm_dqattach(dp); dp 373 fs/xfs/libxfs/xfs_attr.c if (XFS_IFORK_Q(dp) == 0) { dp 377 fs/xfs/libxfs/xfs_attr.c error = xfs_bmap_add_attrfork(dp, sf_size, rsvd); dp 396 fs/xfs/libxfs/xfs_attr.c xfs_ilock(dp, XFS_ILOCK_EXCL); dp 397 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_reserve_quota_nblks(args.trans, dp, args.total, 0, dp 403 fs/xfs/libxfs/xfs_attr.c xfs_trans_ijoin(args.trans, dp, 0); dp 420 fs/xfs/libxfs/xfs_attr.c xfs_trans_ichgtime(args.trans, dp, XFS_ICHGTIME_CHG); dp 425 fs/xfs/libxfs/xfs_attr.c xfs_trans_log_inode(args.trans, dp, XFS_ILOG_CORE); dp 428 fs/xfs/libxfs/xfs_attr.c xfs_iunlock(dp, XFS_ILOCK_EXCL); dp 443 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp, dp 447 fs/xfs/libxfs/xfs_attr.c struct xfs_mount *mp = dp->i_mount; dp 453 fs/xfs/libxfs/xfs_attr.c if (XFS_FORCED_SHUTDOWN(dp->i_mount)) 
dp 456 fs/xfs/libxfs/xfs_attr.c error = xfs_attr_args_init(&args, dp, name, flags); dp 467 fs/xfs/libxfs/xfs_attr.c error = xfs_qm_dqattach(dp); dp 482 fs/xfs/libxfs/xfs_attr.c xfs_ilock(dp, XFS_ILOCK_EXCL); dp 487 fs/xfs/libxfs/xfs_attr.c xfs_trans_ijoin(args.trans, dp, 0); dp 501 fs/xfs/libxfs/xfs_attr.c xfs_trans_ichgtime(args.trans, dp, XFS_ICHGTIME_CHG); dp 506 fs/xfs/libxfs/xfs_attr.c xfs_trans_log_inode(args.trans, dp, XFS_ILOG_CORE); dp 508 fs/xfs/libxfs/xfs_attr.c xfs_iunlock(dp, XFS_ILOCK_EXCL); dp 515 fs/xfs/libxfs/xfs_attr.c xfs_iunlock(dp, XFS_ILOCK_EXCL); dp 555 fs/xfs/libxfs/xfs_attr.c newsize = XFS_ATTR_SF_TOTSIZE(args->dp); dp 558 fs/xfs/libxfs/xfs_attr.c forkoff = xfs_attr_shortform_bytesfit(args->dp, newsize); dp 581 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp; dp 590 fs/xfs/libxfs/xfs_attr.c dp = args->dp; dp 592 fs/xfs/libxfs/xfs_attr.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, -1, &bp); dp 652 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 667 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 717 fs/xfs/libxfs/xfs_attr.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, dp 727 fs/xfs/libxfs/xfs_attr.c if ((forkoff = xfs_attr_shortform_allfit(bp, dp))) { dp 740 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 761 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp; dp 770 fs/xfs/libxfs/xfs_attr.c dp = args->dp; dp 772 fs/xfs/libxfs/xfs_attr.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, -1, &bp); dp 787 fs/xfs/libxfs/xfs_attr.c if ((forkoff = xfs_attr_shortform_allfit(bp, dp))) { dp 816 fs/xfs/libxfs/xfs_attr.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, -1, &bp); dp 850 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp; dp 859 fs/xfs/libxfs/xfs_attr.c dp = args->dp; dp 860 fs/xfs/libxfs/xfs_attr.c mp = dp->i_mount; dp 922 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 959 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 1042 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 1077 fs/xfs/libxfs/xfs_attr.c struct xfs_inode *dp; dp 1086 fs/xfs/libxfs/xfs_attr.c dp = args->dp; dp 1089 fs/xfs/libxfs/xfs_attr.c state->mp = dp->i_mount; dp 1160 fs/xfs/libxfs/xfs_attr.c error = xfs_trans_roll_inode(&args->trans, dp); dp 1168 fs/xfs/libxfs/xfs_attr.c if (xfs_bmap_one_block(dp, XFS_ATTR_FORK)) { dp 1176 fs/xfs/libxfs/xfs_attr.c error = xfs_attr3_leaf_read(args->trans, args->dp, 0, -1, &bp); dp 1180 fs/xfs/libxfs/xfs_attr.c if ((forkoff = xfs_attr_shortform_allfit(bp, dp))) { dp 1270 fs/xfs/libxfs/xfs_attr.c state->args->dp, dp 1289 fs/xfs/libxfs/xfs_attr.c state->args->dp, dp 1323 fs/xfs/libxfs/xfs_attr.c state->mp = args->dp->i_mount; dp 117 fs/xfs/libxfs/xfs_attr.h struct xfs_inode *dp; /* inode */ dp 145 fs/xfs/libxfs/xfs_attr.h int xfs_attr_inactive(struct xfs_inode *dp); dp 152 fs/xfs/libxfs/xfs_attr.h int xfs_attr_set(struct xfs_inode *dp, const unsigned char *name, dp 155 fs/xfs/libxfs/xfs_attr.h int xfs_attr_remove(struct xfs_inode *dp, const unsigned char *name, int flags); dp 157 fs/xfs/libxfs/xfs_attr.h int xfs_attr_list(struct xfs_inode *dp, char *buffer, int bufsize, dp 368 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_inode *dp, dp 375 fs/xfs/libxfs/xfs_attr_leaf.c err = xfs_da_read_buf(tp, dp, bno, mappedbno, bpp, dp 456 fs/xfs/libxfs/xfs_attr_leaf.c xfs_attr_shortform_bytesfit(xfs_inode_t *dp, int bytes) dp 462 fs/xfs/libxfs/xfs_attr_leaf.c 
xfs_mount_t *mp = dp->i_mount; dp 465 fs/xfs/libxfs/xfs_attr_leaf.c offset = (XFS_LITINO(mp, dp->i_d.di_version) - bytes) >> 3; dp 467 fs/xfs/libxfs/xfs_attr_leaf.c if (dp->i_d.di_format == XFS_DINODE_FMT_DEV) { dp 482 fs/xfs/libxfs/xfs_attr_leaf.c if (bytes <= XFS_IFORK_ASIZE(dp)) dp 483 fs/xfs/libxfs/xfs_attr_leaf.c return dp->i_d.di_forkoff; dp 493 fs/xfs/libxfs/xfs_attr_leaf.c dsize = dp->i_df.if_bytes; dp 495 fs/xfs/libxfs/xfs_attr_leaf.c switch (dp->i_d.di_format) { dp 504 fs/xfs/libxfs/xfs_attr_leaf.c if (!dp->i_d.di_forkoff && dp->i_df.if_bytes > dp 505 fs/xfs/libxfs/xfs_attr_leaf.c xfs_default_attroffset(dp)) dp 515 fs/xfs/libxfs/xfs_attr_leaf.c if (dp->i_d.di_forkoff) { dp 516 fs/xfs/libxfs/xfs_attr_leaf.c if (offset < dp->i_d.di_forkoff) dp 518 fs/xfs/libxfs/xfs_attr_leaf.c return dp->i_d.di_forkoff; dp 520 fs/xfs/libxfs/xfs_attr_leaf.c dsize = XFS_BMAP_BROOT_SPACE(mp, dp->i_df.if_broot); dp 532 fs/xfs/libxfs/xfs_attr_leaf.c maxforkoff = XFS_LITINO(mp, dp->i_d.di_version) - dp 568 fs/xfs/libxfs/xfs_attr_leaf.c xfs_inode_t *dp; dp 573 fs/xfs/libxfs/xfs_attr_leaf.c dp = args->dp; dp 574 fs/xfs/libxfs/xfs_attr_leaf.c ASSERT(dp != NULL); dp 575 fs/xfs/libxfs/xfs_attr_leaf.c ifp = dp->i_afp; dp 578 fs/xfs/libxfs/xfs_attr_leaf.c if (dp->i_d.di_aformat == XFS_DINODE_FMT_EXTENTS) { dp 580 fs/xfs/libxfs/xfs_attr_leaf.c dp->i_d.di_aformat = XFS_DINODE_FMT_LOCAL; dp 585 fs/xfs/libxfs/xfs_attr_leaf.c xfs_idata_realloc(dp, sizeof(*hdr), XFS_ATTR_FORK); dp 589 fs/xfs/libxfs/xfs_attr_leaf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_ADATA); dp 603 fs/xfs/libxfs/xfs_attr_leaf.c xfs_inode_t *dp; dp 608 fs/xfs/libxfs/xfs_attr_leaf.c dp = args->dp; dp 609 fs/xfs/libxfs/xfs_attr_leaf.c mp = dp->i_mount; dp 610 fs/xfs/libxfs/xfs_attr_leaf.c dp->i_d.di_forkoff = forkoff; dp 612 fs/xfs/libxfs/xfs_attr_leaf.c ifp = dp->i_afp; dp 630 fs/xfs/libxfs/xfs_attr_leaf.c xfs_idata_realloc(dp, size, XFS_ATTR_FORK); dp 641 fs/xfs/libxfs/xfs_attr_leaf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_ADATA); dp 675 fs/xfs/libxfs/xfs_attr_leaf.c xfs_inode_t *dp; dp 679 fs/xfs/libxfs/xfs_attr_leaf.c dp = args->dp; dp 680 fs/xfs/libxfs/xfs_attr_leaf.c mp = dp->i_mount; dp 682 fs/xfs/libxfs/xfs_attr_leaf.c sf = (xfs_attr_shortform_t *)dp->i_afp->if_u1.if_data; dp 715 fs/xfs/libxfs/xfs_attr_leaf.c (dp->i_d.di_format != XFS_DINODE_FMT_BTREE) && dp 717 fs/xfs/libxfs/xfs_attr_leaf.c xfs_attr_fork_remove(dp, args->trans); dp 719 fs/xfs/libxfs/xfs_attr_leaf.c xfs_idata_realloc(dp, -size, XFS_ATTR_FORK); dp 720 fs/xfs/libxfs/xfs_attr_leaf.c dp->i_d.di_forkoff = xfs_attr_shortform_bytesfit(dp, totsize); dp 721 fs/xfs/libxfs/xfs_attr_leaf.c ASSERT(dp->i_d.di_forkoff); dp 725 fs/xfs/libxfs/xfs_attr_leaf.c dp->i_d.di_format == XFS_DINODE_FMT_BTREE); dp 726 fs/xfs/libxfs/xfs_attr_leaf.c xfs_trans_log_inode(args->trans, dp, dp 749 fs/xfs/libxfs/xfs_attr_leaf.c ifp = args->dp->i_afp; dp 781 fs/xfs/libxfs/xfs_attr_leaf.c ASSERT(args->dp->i_afp->if_flags == XFS_IFINLINE); dp 782 fs/xfs/libxfs/xfs_attr_leaf.c sf = (xfs_attr_shortform_t *)args->dp->i_afp->if_u1.if_data; dp 807 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_inode *dp; dp 819 fs/xfs/libxfs/xfs_attr_leaf.c dp = args->dp; dp 820 fs/xfs/libxfs/xfs_attr_leaf.c ifp = dp->i_afp; dp 828 fs/xfs/libxfs/xfs_attr_leaf.c xfs_idata_realloc(dp, -size, XFS_ATTR_FORK); dp 829 fs/xfs/libxfs/xfs_attr_leaf.c xfs_bmap_local_to_extents_empty(args->trans, dp, XFS_ATTR_FORK); dp 842 fs/xfs/libxfs/xfs_attr_leaf.c nargs.dp = dp; dp 880 fs/xfs/libxfs/xfs_attr_leaf.c 
struct xfs_inode *dp) dp 909 fs/xfs/libxfs/xfs_attr_leaf.c if ((dp->i_mount->m_flags & XFS_MOUNT_ATTR2) && dp 910 fs/xfs/libxfs/xfs_attr_leaf.c (dp->i_d.di_format != XFS_DINODE_FMT_BTREE) && dp 913 fs/xfs/libxfs/xfs_attr_leaf.c return xfs_attr_shortform_bytesfit(dp, bytes); dp 1004 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_inode *dp = args->dp; dp 1032 fs/xfs/libxfs/xfs_attr_leaf.c ASSERT(dp->i_mount->m_flags & XFS_MOUNT_ATTR2); dp 1033 fs/xfs/libxfs/xfs_attr_leaf.c ASSERT(dp->i_d.di_format != XFS_DINODE_FMT_BTREE); dp 1034 fs/xfs/libxfs/xfs_attr_leaf.c xfs_attr_fork_remove(dp, args->trans); dp 1045 fs/xfs/libxfs/xfs_attr_leaf.c nargs.dp = dp; dp 1086 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_inode *dp = args->dp; dp 1087 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_mount *mp = dp->i_mount; dp 1098 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_attr3_leaf_read(args->trans, dp, 0, -1, &bp1); dp 1102 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_da_get_buf(args->trans, dp, blkno, -1, &bp2, XFS_ATTR_FORK); dp 1123 fs/xfs/libxfs/xfs_attr_leaf.c dp->d_ops->node_hdr_from_disk(&icnodehdr, node); dp 1124 fs/xfs/libxfs/xfs_attr_leaf.c btree = dp->d_ops->node_tree_p(node); dp 1134 fs/xfs/libxfs/xfs_attr_leaf.c dp->d_ops->node_hdr_to_disk(node, &icnodehdr); dp 1157 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_inode *dp = args->dp; dp 1158 fs/xfs/libxfs/xfs_attr_leaf.c struct xfs_mount *mp = dp->i_mount; dp 1164 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_da_get_buf(args->trans, args->dp, blkno, -1, &bp, dp 1182 fs/xfs/libxfs/xfs_attr_leaf.c hdr3->owner = cpu_to_be64(dp->i_ino); dp 1933 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_attr3_leaf_read(state->args->trans, state->args->dp, dp 2364 fs/xfs/libxfs/xfs_attr_leaf.c args->dp->i_mount, dp 2412 fs/xfs/libxfs/xfs_attr_leaf.c args->rmtblkcnt = xfs_attr3_rmt_blocks(args->dp->i_mount, dp 2664 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, -1, &bp); dp 2707 fs/xfs/libxfs/xfs_attr_leaf.c return xfs_trans_roll_inode(&args->trans, args->dp); dp 2731 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, -1, &bp); dp 2758 fs/xfs/libxfs/xfs_attr_leaf.c return xfs_trans_roll_inode(&args->trans, args->dp); dp 2793 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno, -1, &bp1); dp 2801 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_attr3_leaf_read(args->trans, args->dp, args->blkno2, dp 2876 fs/xfs/libxfs/xfs_attr_leaf.c error = xfs_trans_roll_inode(&args->trans, args->dp); dp 42 fs/xfs/libxfs/xfs_attr_leaf.h int xfs_attr_shortform_allfit(struct xfs_buf *bp, struct xfs_inode *dp); dp 43 fs/xfs/libxfs/xfs_attr_leaf.h int xfs_attr_shortform_bytesfit(struct xfs_inode *dp, int bytes); dp 87 fs/xfs/libxfs/xfs_attr_leaf.h int xfs_attr3_leaf_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 369 fs/xfs/libxfs/xfs_attr_remote.c struct xfs_mount *mp = args->dp->i_mount; dp 388 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_bmapi_read(args->dp, (xfs_fileoff_t)lblkno, dp 410 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_attr_rmtval_copyout(mp, bp, args->dp->i_ino, dp 434 fs/xfs/libxfs/xfs_attr_remote.c struct xfs_inode *dp = args->dp; dp 435 fs/xfs/libxfs/xfs_attr_remote.c struct xfs_mount *mp = dp->i_mount; dp 455 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_bmap_first_unused(args->trans, args->dp, blkcnt, &lfileoff, dp 480 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_bmapi_write(args->trans, dp, (xfs_fileoff_t)lblkno, dp 498 fs/xfs/libxfs/xfs_attr_remote.c error = 
xfs_trans_roll_inode(&args->trans, dp); dp 520 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_bmapi_read(dp, (xfs_fileoff_t)lblkno, dp 537 fs/xfs/libxfs/xfs_attr_remote.c xfs_attr_rmtval_copyin(mp, bp, args->dp->i_ino, &offset, dp 562 fs/xfs/libxfs/xfs_attr_remote.c struct xfs_mount *mp = args->dp->i_mount; dp 586 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_bmapi_read(args->dp, (xfs_fileoff_t)lblkno, dp 618 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_bunmapi(args->trans, args->dp, lblkno, blkcnt, dp 629 fs/xfs/libxfs/xfs_attr_remote.c error = xfs_trans_roll_inode(&args->trans, args->dp); dp 38 fs/xfs/libxfs/xfs_attr_sf.h #define XFS_ATTR_SF_TOTSIZE(dp) /* total space in use */ \ dp 40 fs/xfs/libxfs/xfs_attr_sf.h ((dp)->i_afp->if_u1.if_data))->hdr.totsize)) dp 1004 fs/xfs/libxfs/xfs_bmap.c dargs.dp = ip; dp 281 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 289 fs/xfs/libxfs/xfs_da_btree.c err = xfs_da_read_buf(tp, dp, bno, mappedbno, bpp, dp 341 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = args->dp; dp 346 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da_get_buf(tp, dp, blkno, -1, &bp, whichfork); dp 359 fs/xfs/libxfs/xfs_da_btree.c hdr3->info.owner = cpu_to_be64(args->dp->i_ino); dp 366 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(node, &ichdr); dp 368 fs/xfs/libxfs/xfs_da_btree.c XFS_DA_LOGRANGE(node, &node->hdr, dp->d_ops->node_hdr_size)); dp 550 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp; dp 569 fs/xfs/libxfs/xfs_da_btree.c dp = args->dp; dp 571 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da_get_buf(tp, dp, blkno, -1, &bp, args->whichfork); dp 580 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&icnodehdr, oldroot); dp 581 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(oldroot); dp 595 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 596 fs/xfs/libxfs/xfs_da_btree.c ents = dp->d_ops->leaf_ents_p(leaf); dp 640 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 641 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 647 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(node, &nodehdr); dp 684 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 689 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 736 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 785 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 791 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr1, node1); dp 792 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr2, node2); dp 793 fs/xfs/libxfs/xfs_da_btree.c btree1 = dp->d_ops->node_tree_p(node1); dp 794 fs/xfs/libxfs/xfs_da_btree.c btree2 = dp->d_ops->node_tree_p(node2); dp 807 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr1, node1); dp 808 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr2, node2); dp 809 fs/xfs/libxfs/xfs_da_btree.c btree1 = dp->d_ops->node_tree_p(node1); dp 810 fs/xfs/libxfs/xfs_da_btree.c btree2 = dp->d_ops->node_tree_p(node2); dp 872 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(node1, &nodehdr1); dp 874 fs/xfs/libxfs/xfs_da_btree.c XFS_DA_LOGRANGE(node1, &node1->hdr, dp->d_ops->node_hdr_size)); dp 876 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(node2, &nodehdr2); dp 879 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_size + dp 889 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr1, node1); dp 890 
fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr2, node2); dp 891 fs/xfs/libxfs/xfs_da_btree.c btree1 = dp->d_ops->node_tree_p(node1); dp 892 fs/xfs/libxfs/xfs_da_btree.c btree2 = dp->d_ops->node_tree_p(node2); dp 919 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 924 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 925 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 948 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(node, &nodehdr); dp 950 fs/xfs/libxfs/xfs_da_btree.c XFS_DA_LOGRANGE(node, &node->hdr, dp->d_ops->node_hdr_size)); dp 1087 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1095 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&oldroothdr, oldroot); dp 1109 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(oldroot); dp 1112 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, dp, child, -1, &bp, dp 1163 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1175 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1227 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(state->args->trans, dp, dp 1233 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&thdr, node); dp 1271 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 1280 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1285 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 1304 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1317 fs/xfs/libxfs/xfs_da_btree.c lasthash = xfs_dir2_leaf_lasthash(dp, blk->bp, &count); dp 1322 fs/xfs/libxfs/xfs_da_btree.c lasthash = xfs_da3_node_lasthash(dp, blk->bp, &count); dp 1331 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1332 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 1358 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1363 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1371 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 1384 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(node, &nodehdr); dp 1386 fs/xfs/libxfs/xfs_da_btree.c XFS_DA_LOGRANGE(node, &node->hdr, dp->d_ops->node_hdr_size)); dp 1413 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1419 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&drop_hdr, drop_node); dp 1420 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&save_hdr, save_node); dp 1421 fs/xfs/libxfs/xfs_da_btree.c drop_btree = dp->d_ops->node_tree_p(drop_node); dp 1422 fs/xfs/libxfs/xfs_da_btree.c save_btree = dp->d_ops->node_tree_p(save_node); dp 1456 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_to_disk(save_node, &save_hdr); dp 1459 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_size)); dp 1503 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1519 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, args->dp, blkno, dp 1539 fs/xfs/libxfs/xfs_da_btree.c blk->hashval = xfs_dir2_leaf_lasthash(args->dp, dp 1553 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1554 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 1668 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 1681 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&node1hdr, node1); dp 1682 fs/xfs/libxfs/xfs_da_btree.c 
dp->d_ops->node_hdr_from_disk(&node2hdr, node2); dp 1683 fs/xfs/libxfs/xfs_da_btree.c btree1 = dp->d_ops->node_tree_p(node1); dp 1684 fs/xfs/libxfs/xfs_da_btree.c btree2 = dp->d_ops->node_tree_p(node2); dp 1711 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1729 fs/xfs/libxfs/xfs_da_btree.c before = xfs_dir2_leafn_order(dp, old_blk->bp, new_blk->bp); dp 1732 fs/xfs/libxfs/xfs_da_btree.c before = xfs_da3_node_order(dp, old_blk->bp, new_blk->bp); dp 1747 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, dp, dp 1768 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, dp, dp 1827 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, args->dp, dp 1844 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, args->dp, dp 1889 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = state->args->dp; dp 1905 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1906 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 1932 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(args->trans, dp, blkno, -1, &bp, dp 1966 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 1967 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(node); dp 1987 fs/xfs/libxfs/xfs_da_btree.c blk->hashval = xfs_dir2_leaf_lasthash(args->dp, dp 2066 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp = args->dp; dp 2068 fs/xfs/libxfs/xfs_da_btree.c xfs_rfsblock_t nblks = dp->i_d.di_nblocks; dp 2075 fs/xfs/libxfs/xfs_da_btree.c error = xfs_bmap_first_unused(tp, dp, count, bno, w); dp 2083 fs/xfs/libxfs/xfs_da_btree.c error = xfs_bmapi_write(tp, dp, *bno, count, dp 2105 fs/xfs/libxfs/xfs_da_btree.c error = xfs_bmapi_write(tp, dp, b, c, dp 2134 fs/xfs/libxfs/xfs_da_btree.c args->total -= dp->i_d.di_nblocks - nblks; dp 2184 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp; dp 2208 fs/xfs/libxfs/xfs_da_btree.c dp = args->dp; dp 2211 fs/xfs/libxfs/xfs_da_btree.c mp = dp->i_mount; dp 2213 fs/xfs/libxfs/xfs_da_btree.c error = xfs_bmap_last_before(tp, dp, &lastoff, w); dp 2225 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(tp, dp, last_blkno, -1, &last_buf, w); dp 2243 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, dead_leaf2); dp 2244 fs/xfs/libxfs/xfs_da_btree.c ents = dp->d_ops->leaf_ents_p(dead_leaf2); dp 2251 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&deadhdr, dead_node); dp 2252 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(dead_node); dp 2261 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(tp, dp, sib_blkno, -1, &sib_buf, w); dp 2283 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(tp, dp, sib_blkno, -1, &sib_buf, w); dp 2307 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(tp, dp, par_blkno, -1, &par_buf, w); dp 2311 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&par_hdr, par_node); dp 2319 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(par_node); dp 2358 fs/xfs/libxfs/xfs_da_btree.c error = xfs_da3_node_read(tp, dp, par_blkno, -1, &par_buf, w); dp 2362 fs/xfs/libxfs/xfs_da_btree.c dp->d_ops->node_hdr_from_disk(&par_hdr, par_node); dp 2369 fs/xfs/libxfs/xfs_da_btree.c btree = dp->d_ops->node_tree_p(par_node); dp 2400 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp; dp 2406 fs/xfs/libxfs/xfs_da_btree.c dp = args->dp; dp 2415 fs/xfs/libxfs/xfs_da_btree.c error = xfs_bunmapi(tp, dp, dead_blkno, count, dp 2510 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 2517 
fs/xfs/libxfs/xfs_da_btree.c struct xfs_mount *mp = dp->i_mount; dp 2545 fs/xfs/libxfs/xfs_da_btree.c error = xfs_bmapi_read(dp, (xfs_fileoff_t)bno, nfsb, irecs, dp 2564 fs/xfs/libxfs/xfs_da_btree.c (long long)dp->i_ino); dp 2593 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 2608 fs/xfs/libxfs/xfs_da_btree.c error = xfs_dabuf_map(dp, bno, mappedbno, whichfork, dp 2617 fs/xfs/libxfs/xfs_da_btree.c bp = xfs_trans_get_buf_map(trans, dp->i_mount->m_ddev_targp, dp 2641 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 2657 fs/xfs/libxfs/xfs_da_btree.c error = xfs_dabuf_map(dp, bno, mappedbno, whichfork, dp 2666 fs/xfs/libxfs/xfs_da_btree.c error = xfs_trans_read_buf_map(dp->i_mount, trans, dp 2667 fs/xfs/libxfs/xfs_da_btree.c dp->i_mount->m_ddev_targp, dp 2689 fs/xfs/libxfs/xfs_da_btree.c struct xfs_inode *dp, dp 2702 fs/xfs/libxfs/xfs_da_btree.c error = xfs_dabuf_map(dp, bno, mappedbno, whichfork, dp 2712 fs/xfs/libxfs/xfs_da_btree.c xfs_buf_readahead_map(dp->i_mount->m_ddev_targp, mapp, nmap, ops); dp 58 fs/xfs/libxfs/xfs_da_btree.h struct xfs_inode *dp; /* directory inode to manipulate */ dp 174 fs/xfs/libxfs/xfs_da_btree.h int xfs_da3_node_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 184 fs/xfs/libxfs/xfs_da_btree.h int xfs_da_get_buf(struct xfs_trans *trans, struct xfs_inode *dp, dp 187 fs/xfs/libxfs/xfs_da_btree.h int xfs_da_read_buf(struct xfs_trans *trans, struct xfs_inode *dp, dp 191 fs/xfs/libxfs/xfs_da_btree.h int xfs_da_reada_buf(struct xfs_inode *dp, xfs_dablk_t bno, dp 863 fs/xfs/libxfs/xfs_da_format.c struct xfs_inode *dp) dp 865 fs/xfs/libxfs/xfs_da_format.c if (dp) dp 866 fs/xfs/libxfs/xfs_da_format.c return dp->d_ops; dp 879 fs/xfs/libxfs/xfs_da_format.c struct xfs_inode *dp) dp 881 fs/xfs/libxfs/xfs_da_format.c if (dp) dp 882 fs/xfs/libxfs/xfs_da_format.c return dp->d_ops; dp 171 fs/xfs/libxfs/xfs_dir2.c xfs_inode_t *dp) dp 175 fs/xfs/libxfs/xfs_dir2.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 176 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_size == 0) /* might happen during shutdown. 
*/ dp 178 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_size > XFS_IFORK_DSIZE(dp)) dp 180 fs/xfs/libxfs/xfs_dir2.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 209 fs/xfs/libxfs/xfs_dir2.c xfs_inode_t *dp, dp 215 fs/xfs/libxfs/xfs_dir2.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 224 fs/xfs/libxfs/xfs_dir2.c args->geo = dp->i_mount->m_dir_geo; dp 225 fs/xfs/libxfs/xfs_dir2.c args->dp = dp; dp 239 fs/xfs/libxfs/xfs_dir2.c struct xfs_inode *dp, dp 248 fs/xfs/libxfs/xfs_dir2.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 254 fs/xfs/libxfs/xfs_dir2.c XFS_STATS_INC(dp->i_mount, xs_dir_create); dp 261 fs/xfs/libxfs/xfs_dir2.c args->geo = dp->i_mount->m_dir_geo; dp 265 fs/xfs/libxfs/xfs_dir2.c args->hashval = dp->i_mount->m_dirnameops->hashname(name); dp 267 fs/xfs/libxfs/xfs_dir2.c args->dp = dp; dp 275 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_format == XFS_DINODE_FMT_LOCAL) { dp 335 fs/xfs/libxfs/xfs_dir2.c xfs_inode_t *dp, dp 345 fs/xfs/libxfs/xfs_dir2.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 346 fs/xfs/libxfs/xfs_dir2.c XFS_STATS_INC(dp->i_mount, xs_dir_lookup); dp 357 fs/xfs/libxfs/xfs_dir2.c args->geo = dp->i_mount->m_dir_geo; dp 361 fs/xfs/libxfs/xfs_dir2.c args->hashval = dp->i_mount->m_dirnameops->hashname(name); dp 362 fs/xfs/libxfs/xfs_dir2.c args->dp = dp; dp 369 fs/xfs/libxfs/xfs_dir2.c lock_mode = xfs_ilock_data_map_shared(dp); dp 370 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_format == XFS_DINODE_FMT_LOCAL) { dp 402 fs/xfs/libxfs/xfs_dir2.c xfs_iunlock(dp, lock_mode); dp 413 fs/xfs/libxfs/xfs_dir2.c struct xfs_inode *dp, dp 422 fs/xfs/libxfs/xfs_dir2.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 423 fs/xfs/libxfs/xfs_dir2.c XFS_STATS_INC(dp->i_mount, xs_dir_remove); dp 429 fs/xfs/libxfs/xfs_dir2.c args->geo = dp->i_mount->m_dir_geo; dp 433 fs/xfs/libxfs/xfs_dir2.c args->hashval = dp->i_mount->m_dirnameops->hashname(name); dp 435 fs/xfs/libxfs/xfs_dir2.c args->dp = dp; dp 440 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_format == XFS_DINODE_FMT_LOCAL) { dp 471 fs/xfs/libxfs/xfs_dir2.c struct xfs_inode *dp, dp 480 fs/xfs/libxfs/xfs_dir2.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 490 fs/xfs/libxfs/xfs_dir2.c args->geo = dp->i_mount->m_dir_geo; dp 494 fs/xfs/libxfs/xfs_dir2.c args->hashval = dp->i_mount->m_dirnameops->hashname(name); dp 496 fs/xfs/libxfs/xfs_dir2.c args->dp = dp; dp 501 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_format == XFS_DINODE_FMT_LOCAL) { dp 532 fs/xfs/libxfs/xfs_dir2.c xfs_inode_t *dp, dp 535 fs/xfs/libxfs/xfs_dir2.c return xfs_dir_createname(tp, dp, name, 0, 0); dp 554 fs/xfs/libxfs/xfs_dir2.c struct xfs_inode *dp = args->dp; dp 555 fs/xfs/libxfs/xfs_dir2.c struct xfs_mount *mp = dp->i_mount; dp 581 fs/xfs/libxfs/xfs_dir2.c if (size > dp->i_d.di_size) { dp 582 fs/xfs/libxfs/xfs_dir2.c dp->i_d.di_size = size; dp 583 fs/xfs/libxfs/xfs_dir2.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE); dp 600 fs/xfs/libxfs/xfs_dir2.c if ((rval = xfs_bmap_last_offset(args->dp, &last, XFS_DATA_FORK))) dp 602 fs/xfs/libxfs/xfs_dir2.c rval = XFS_FSB_TO_B(args->dp->i_mount, last) == args->geo->blksize; dp 603 fs/xfs/libxfs/xfs_dir2.c if (rval != 0 && args->dp->i_d.di_size != args->geo->blksize) dp 620 fs/xfs/libxfs/xfs_dir2.c if ((rval = xfs_bmap_last_offset(args->dp, &last, XFS_DATA_FORK))) dp 640 fs/xfs/libxfs/xfs_dir2.c struct xfs_inode *dp; dp 647 fs/xfs/libxfs/xfs_dir2.c dp = args->dp; dp 648 fs/xfs/libxfs/xfs_dir2.c mp = dp->i_mount; dp 653 fs/xfs/libxfs/xfs_dir2.c error = xfs_bunmapi(tp, dp, da, args->geo->fsbcount, 0, 0, &done); dp 682 fs/xfs/libxfs/xfs_dir2.c if (dp->i_d.di_size > 
xfs_dir2_db_off_to_byte(args->geo, db + 1, 0)) dp 685 fs/xfs/libxfs/xfs_dir2.c if ((error = xfs_bmap_last_before(tp, dp, &bno, XFS_DATA_FORK))) { dp 698 fs/xfs/libxfs/xfs_dir2.c dp->i_d.di_size = XFS_FSB_TO_B(mp, bno); dp 699 fs/xfs/libxfs/xfs_dir2.c xfs_trans_log_inode(tp, dp, XFS_ILOG_CORE); dp 104 fs/xfs/libxfs/xfs_dir2.h xfs_dir_get_ops(struct xfs_mount *mp, struct xfs_inode *dp); dp 106 fs/xfs/libxfs/xfs_dir2.h xfs_nondir_get_ops(struct xfs_mount *mp, struct xfs_inode *dp); dp 115 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_isempty(struct xfs_inode *dp); dp 116 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_init(struct xfs_trans *tp, struct xfs_inode *dp, dp 118 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_createname(struct xfs_trans *tp, struct xfs_inode *dp, dp 121 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_lookup(struct xfs_trans *tp, struct xfs_inode *dp, dp 124 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_removename(struct xfs_trans *tp, struct xfs_inode *dp, dp 127 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_replace(struct xfs_trans *tp, struct xfs_inode *dp, dp 130 fs/xfs/libxfs/xfs_dir2.h extern int xfs_dir_canenter(struct xfs_trans *tp, struct xfs_inode *dp, dp 149 fs/xfs/libxfs/xfs_dir2.h extern void xfs_dir2_data_freescan(struct xfs_inode *dp, dp 187 fs/xfs/libxfs/xfs_dir2.h xfs_dir2_dataptr_to_byte(xfs_dir2_dataptr_t dp) dp 189 fs/xfs/libxfs/xfs_dir2.h return (xfs_dir2_off_t)dp << XFS_DIR2_DATA_ALIGN_LOG; dp 214 fs/xfs/libxfs/xfs_dir2.h xfs_dir2_dataptr_to_db(struct xfs_da_geometry *geo, xfs_dir2_dataptr_t dp) dp 216 fs/xfs/libxfs/xfs_dir2.h return xfs_dir2_byte_to_db(geo, xfs_dir2_dataptr_to_byte(dp)); dp 232 fs/xfs/libxfs/xfs_dir2.h xfs_dir2_dataptr_to_off(struct xfs_da_geometry *geo, xfs_dir2_dataptr_t dp) dp 234 fs/xfs/libxfs/xfs_dir2.h return xfs_dir2_byte_to_off(geo, xfs_dir2_dataptr_to_byte(dp)); dp 120 fs/xfs/libxfs/xfs_dir2_block.c struct xfs_inode *dp, dp 123 fs/xfs/libxfs/xfs_dir2_block.c struct xfs_mount *mp = dp->i_mount; dp 126 fs/xfs/libxfs/xfs_dir2_block.c err = xfs_da_read_buf(tp, dp, mp->m_dir_geo->datablk, -1, bpp, dp 138 fs/xfs/libxfs/xfs_dir2_block.c struct xfs_inode *dp) dp 149 fs/xfs/libxfs/xfs_dir2_block.c hdr3->owner = cpu_to_be64(dp->i_ino); dp 159 fs/xfs/libxfs/xfs_dir2_block.c struct xfs_inode *dp, dp 175 fs/xfs/libxfs/xfs_dir2_block.c bf = dp->d_ops->data_bestfree_p(hdr); dp 314 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir2_data_freescan(args->dp, hdr, needlog); dp 330 fs/xfs/libxfs/xfs_dir2_block.c xfs_inode_t *dp; /* directory inode */ dp 350 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 354 fs/xfs/libxfs/xfs_dir2_block.c error = xfs_dir3_block_read(tp, dp, &bp); dp 358 fs/xfs/libxfs/xfs_dir2_block.c len = dp->d_ops->data_entsize(args->namelen); dp 371 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir2_block_need_space(dp, hdr, btp, blp, &tagp, &dup, dp 461 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 544 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_put_ftype(dep, args->filetype); dp 545 fs/xfs/libxfs/xfs_dir2_block.c tagp = dp->d_ops->data_entry_tag_p(dep); dp 551 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 556 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_data_check(dp, bp); dp 609 fs/xfs/libxfs/xfs_dir2_block.c xfs_inode_t *dp; /* incore inode */ dp 621 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 623 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_data_check(dp, bp); dp 636 fs/xfs/libxfs/xfs_dir2_block.c args->filetype = dp->d_ops->data_get_ftype(dep); dp 657 fs/xfs/libxfs/xfs_dir2_block.c 
xfs_inode_t *dp; /* incore inode */ dp 667 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 669 fs/xfs/libxfs/xfs_dir2_block.c mp = dp->i_mount; dp 671 fs/xfs/libxfs/xfs_dir2_block.c error = xfs_dir3_block_read(tp, dp, &bp); dp 676 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_data_check(dp, bp); dp 759 fs/xfs/libxfs/xfs_dir2_block.c xfs_inode_t *dp; /* incore inode */ dp 777 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 794 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_entsize(dep->namelen), &needlog, &needscan); dp 809 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 812 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_data_check(dp, bp); dp 816 fs/xfs/libxfs/xfs_dir2_block.c size = xfs_dir2_block_sfsize(dp, hdr, &sfh); dp 817 fs/xfs/libxfs/xfs_dir2_block.c if (size > XFS_IFORK_DSIZE(dp)) dp 839 fs/xfs/libxfs/xfs_dir2_block.c xfs_inode_t *dp; /* incore inode */ dp 852 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 867 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_put_ftype(dep, args->filetype); dp 869 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_data_check(dp, bp); dp 902 fs/xfs/libxfs/xfs_dir2_block.c xfs_inode_t *dp; /* incore directory inode */ dp 922 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 924 fs/xfs/libxfs/xfs_dir2_block.c mp = dp->i_mount; dp 926 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 927 fs/xfs/libxfs/xfs_dir2_block.c ents = dp->d_ops->leaf_ents_p(leaf); dp 938 fs/xfs/libxfs/xfs_dir2_block.c while (dp->i_d.di_size > args->geo->blksize) { dp 941 fs/xfs/libxfs/xfs_dir2_block.c hdrsz = dp->d_ops->data_entry_offset; dp 956 fs/xfs/libxfs/xfs_dir2_block.c error = xfs_dir3_data_read(tp, dp, args->geo->datablk, -1, &dbp); dp 984 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_block_init(mp, tp, dbp, dp); dp 1017 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 1030 fs/xfs/libxfs/xfs_dir2_block.c size = xfs_dir2_block_sfsize(dp, hdr, &sfh); dp 1031 fs/xfs/libxfs/xfs_dir2_block.c if (size > XFS_IFORK_DSIZE(dp)) dp 1050 fs/xfs/libxfs/xfs_dir2_block.c xfs_inode_t *dp; /* incore directory inode */ dp 1071 fs/xfs/libxfs/xfs_dir2_block.c dp = args->dp; dp 1073 fs/xfs/libxfs/xfs_dir2_block.c mp = dp->i_mount; dp 1074 fs/xfs/libxfs/xfs_dir2_block.c ifp = XFS_IFORK_PTR(dp, XFS_DATA_FORK); dp 1079 fs/xfs/libxfs/xfs_dir2_block.c if (dp->i_d.di_size < offsetof(xfs_dir2_sf_hdr_t, parent)) { dp 1086 fs/xfs/libxfs/xfs_dir2_block.c ASSERT(ifp->if_bytes == dp->i_d.di_size); dp 1088 fs/xfs/libxfs/xfs_dir2_block.c ASSERT(dp->i_d.di_size >= xfs_dir2_sf_hdr_size(oldsfp->i8count)); dp 1089 fs/xfs/libxfs/xfs_dir2_block.c ASSERT(dp->i_d.di_nextents == 0); dp 1098 fs/xfs/libxfs/xfs_dir2_block.c xfs_idata_realloc(dp, -ifp->if_bytes, XFS_DATA_FORK); dp 1099 fs/xfs/libxfs/xfs_dir2_block.c xfs_bmap_local_to_extents_empty(tp, dp, XFS_DATA_FORK); dp 1100 fs/xfs/libxfs/xfs_dir2_block.c dp->i_d.di_size = 0; dp 1114 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_block_init(mp, tp, bp, dp); dp 1126 fs/xfs/libxfs/xfs_dir2_block.c dup = dp->d_ops->data_unused_p(hdr); dp 1152 fs/xfs/libxfs/xfs_dir2_block.c dep = dp->d_ops->data_dot_entry_p(hdr); dp 1153 fs/xfs/libxfs/xfs_dir2_block.c dep->inumber = cpu_to_be64(dp->i_ino); dp 1156 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_put_ftype(dep, XFS_DIR3_FT_DIR); dp 1157 fs/xfs/libxfs/xfs_dir2_block.c tagp = dp->d_ops->data_entry_tag_p(dep); dp 1166 fs/xfs/libxfs/xfs_dir2_block.c dep = dp->d_ops->data_dotdot_entry_p(hdr); dp 1167 fs/xfs/libxfs/xfs_dir2_block.c dep->inumber = 
cpu_to_be64(dp->d_ops->sf_get_parent_ino(sfp)); dp 1170 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_put_ftype(dep, XFS_DIR3_FT_DIR); dp 1171 fs/xfs/libxfs/xfs_dir2_block.c tagp = dp->d_ops->data_entry_tag_p(dep); dp 1177 fs/xfs/libxfs/xfs_dir2_block.c offset = dp->d_ops->data_first_offset; dp 1209 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_bestfree_p(hdr), dp 1218 fs/xfs/libxfs/xfs_dir2_block.c dep->inumber = cpu_to_be64(dp->d_ops->sf_get_ino(sfp, sfep)); dp 1220 fs/xfs/libxfs/xfs_dir2_block.c dp->d_ops->data_put_ftype(dep, dp->d_ops->sf_get_ftype(sfep)); dp 1222 fs/xfs/libxfs/xfs_dir2_block.c tagp = dp->d_ops->data_entry_tag_p(dep); dp 1235 fs/xfs/libxfs/xfs_dir2_block.c sfep = dp->d_ops->sf_nextentry(sfp, sfep); dp 1250 fs/xfs/libxfs/xfs_dir2_block.c xfs_dir3_data_check(dp, bp); dp 33 fs/xfs/libxfs/xfs_dir2_data.c struct xfs_inode *dp, /* incore inode pointer */ dp 63 fs/xfs/libxfs/xfs_dir2_data.c ops = xfs_dir_get_ops(mp, dp); dp 69 fs/xfs/libxfs/xfs_dir2_data.c if ((dp && !S_ISDIR(VFS_I(dp)->i_mode)) || dp 229 fs/xfs/libxfs/xfs_dir2_data.c struct xfs_inode *dp, dp 234 fs/xfs/libxfs/xfs_dir2_data.c fa = __xfs_dir3_data_check(dp, bp); dp 237 fs/xfs/libxfs/xfs_dir2_data.c xfs_corruption_error(__func__, XFS_ERRLEVEL_LOW, dp->i_mount, dp 355 fs/xfs/libxfs/xfs_dir2_data.c struct xfs_inode *dp, dp 362 fs/xfs/libxfs/xfs_dir2_data.c err = xfs_da_read_buf(tp, dp, bno, mapped_bno, bpp, dp 371 fs/xfs/libxfs/xfs_dir2_data.c struct xfs_inode *dp, dp 375 fs/xfs/libxfs/xfs_dir2_data.c return xfs_da_reada_buf(dp, bno, mapped_bno, dp 620 fs/xfs/libxfs/xfs_dir2_data.c struct xfs_inode *dp, dp 624 fs/xfs/libxfs/xfs_dir2_data.c return xfs_dir2_data_freescan_int(dp->i_mount->m_dir_geo, dp->d_ops, dp 640 fs/xfs/libxfs/xfs_dir2_data.c xfs_inode_t *dp; /* incore directory inode */ dp 649 fs/xfs/libxfs/xfs_dir2_data.c dp = args->dp; dp 650 fs/xfs/libxfs/xfs_dir2_data.c mp = dp->i_mount; dp 655 fs/xfs/libxfs/xfs_dir2_data.c error = xfs_da_get_buf(tp, dp, xfs_dir2_db_to_da(args->geo, blkno), dp 672 fs/xfs/libxfs/xfs_dir2_data.c hdr3->owner = cpu_to_be64(dp->i_ino); dp 678 fs/xfs/libxfs/xfs_dir2_data.c bf = dp->d_ops->data_bestfree_p(hdr); dp 679 fs/xfs/libxfs/xfs_dir2_data.c bf[0].offset = cpu_to_be16(dp->d_ops->data_entry_offset); dp 688 fs/xfs/libxfs/xfs_dir2_data.c dup = dp->d_ops->data_unused_p(hdr); dp 691 fs/xfs/libxfs/xfs_dir2_data.c t = args->geo->blksize - (uint)dp->d_ops->data_entry_offset; dp 721 fs/xfs/libxfs/xfs_dir2_data.c (uint)((char *)(args->dp->d_ops->data_entry_tag_p(dep) + 1) - dp 743 fs/xfs/libxfs/xfs_dir2_data.c args->dp->d_ops->data_entry_offset - 1); dp 811 fs/xfs/libxfs/xfs_dir2_data.c if (offset > args->dp->d_ops->data_entry_offset) { dp 837 fs/xfs/libxfs/xfs_dir2_data.c bf = args->dp->d_ops->data_bestfree_p(hdr); dp 1028 fs/xfs/libxfs/xfs_dir2_data.c bf = args->dp->d_ops->data_bestfree_p(hdr); dp 1146 fs/xfs/libxfs/xfs_dir2_data.c xfs_corruption_error(__func__, XFS_ERRLEVEL_LOW, args->dp->i_mount, dp 40 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp, dp 46 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 55 fs/xfs/libxfs/xfs_dir2_leaf.c return xfs_dir3_leaf_check_int(dp->i_mount, dp, &leafhdr, leaf); dp 60 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp, dp 65 fs/xfs/libxfs/xfs_dir2_leaf.c fa = xfs_dir3_leaf1_check(dp, bp); dp 68 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_corruption_error(__func__, XFS_ERRLEVEL_LOW, dp->i_mount, dp 74 fs/xfs/libxfs/xfs_dir2_leaf.c #define xfs_dir3_leaf_check(dp, bp) dp 80 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode 
*dp, dp 96 fs/xfs/libxfs/xfs_dir2_leaf.c ops = xfs_dir_get_ops(mp, dp); dp 217 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp, dp 224 fs/xfs/libxfs/xfs_dir2_leaf.c err = xfs_da_read_buf(tp, dp, fbno, mappedbno, bpp, dp 234 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp, dp 241 fs/xfs/libxfs/xfs_dir2_leaf.c err = xfs_da_read_buf(tp, dp, fbno, mappedbno, bpp, dp 303 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp = args->dp; dp 305 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_mount *mp = dp->i_mount; dp 313 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_da_get_buf(tp, dp, xfs_dir2_db_to_da(args->geo, bno), dp 318 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_init(mp, tp, bp, dp->i_ino, magic); dp 339 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 354 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 375 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_data_check(dp, dbp); dp 378 fs/xfs/libxfs/xfs_dir2_leaf.c bf = dp->d_ops->data_bestfree_p(hdr); dp 379 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 384 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 387 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_to_disk(leaf, &leafhdr); dp 418 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 431 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 432 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_data_check(dp, dbp); dp 572 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp = args->dp; /* incore directory inode */ dp 597 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_dir3_leaf_read(tp, dp, args->geo->leafblk, -1, &lbp); dp 610 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 611 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 613 fs/xfs/libxfs/xfs_dir2_leaf.c length = dp->d_ops->data_entsize(args->namelen); dp 776 fs/xfs/libxfs/xfs_dir2_leaf.c bf = dp->d_ops->data_bestfree_p(hdr); dp 784 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_dir3_data_read(tp, dp, dp 792 fs/xfs/libxfs/xfs_dir2_leaf.c bf = dp->d_ops->data_bestfree_p(hdr); dp 818 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->data_put_ftype(dep, args->filetype); dp 819 fs/xfs/libxfs/xfs_dir2_leaf.c tagp = dp->d_ops->data_entry_tag_p(dep); dp 825 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 855 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_to_disk(leaf, &leafhdr); dp 858 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 859 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_data_check(dp, dbp); dp 878 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_inode *dp = args->dp; dp 887 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 908 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_to_disk(leaf, leafhdr); dp 1054 fs/xfs/libxfs/xfs_dir2_leaf.c ents = args->dp->d_ops->leaf_ents_p(leaf); dp 1079 fs/xfs/libxfs/xfs_dir2_leaf.c args->dp->d_ops->leaf_hdr_size - 1); dp 1114 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 1132 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 1133 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 1135 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1151 fs/xfs/libxfs/xfs_dir2_leaf.c args->filetype = dp->d_ops->data_get_ftype(dep); dp 1174 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 1188 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 1190 fs/xfs/libxfs/xfs_dir2_leaf.c mp = dp->i_mount; dp 1192 fs/xfs/libxfs/xfs_dir2_leaf.c error = 
xfs_dir3_leaf_read(tp, dp, args->geo->leafblk, -1, &lbp); dp 1198 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 1199 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1200 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 1230 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_dir3_data_read(tp, dp, dp 1272 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_dir3_data_read(tp, dp, dp 1305 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 1328 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 1331 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_data_check(dp, dbp); dp 1332 fs/xfs/libxfs/xfs_dir2_leaf.c bf = dp->d_ops->data_bestfree_p(hdr); dp 1333 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 1334 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1353 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->data_entsize(dep->namelen), &needlog, &needscan); dp 1358 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_to_disk(leaf, &leafhdr); dp 1369 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 1380 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_data_check(dp, dbp); dp 1385 fs/xfs/libxfs/xfs_dir2_leaf.c args->geo->blksize - dp->d_ops->data_entry_offset) { dp 1396 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 1431 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 1447 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 1464 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 1466 fs/xfs/libxfs/xfs_dir2_leaf.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1482 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->data_put_ftype(dep, args->filetype); dp 1485 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 1511 fs/xfs/libxfs/xfs_dir2_leaf.c ents = args->dp->d_ops->leaf_ents_p(leaf); dp 1512 fs/xfs/libxfs/xfs_dir2_leaf.c args->dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 1557 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 1563 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 1568 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_dir3_data_read(tp, dp, xfs_dir2_db_to_da(args->geo, db), dp 1579 fs/xfs/libxfs/xfs_dir2_leaf.c struct xfs_dir2_data_free *bf = dp->d_ops->data_bestfree_p(hdr); dp 1584 fs/xfs/libxfs/xfs_dir2_leaf.c args->geo->blksize - dp->d_ops->data_entry_offset); dp 1638 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_inode_t *dp; /* incore directory inode */ dp 1663 fs/xfs/libxfs/xfs_dir2_leaf.c dp = args->dp; dp 1668 fs/xfs/libxfs/xfs_dir2_leaf.c if ((error = xfs_bmap_last_offset(dp, &fo, XFS_DATA_FORK))) { dp 1690 fs/xfs/libxfs/xfs_dir2_leaf.c if ((error = xfs_bmap_last_before(tp, dp, &fo, XFS_DATA_FORK))) { dp 1700 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 1708 fs/xfs/libxfs/xfs_dir2_leaf.c error = xfs_dir2_free_read(tp, dp, args->geo->freeblk, &fbp); dp 1712 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1746 fs/xfs/libxfs/xfs_dir2_leaf.c memcpy(xfs_dir2_leaf_bests_p(ltp), dp->d_ops->free_bests_p(free), dp 1749 fs/xfs/libxfs/xfs_dir2_leaf.c dp->d_ops->leaf_hdr_to_disk(leaf, &leafhdr); dp 1753 fs/xfs/libxfs/xfs_dir2_leaf.c xfs_dir3_leaf_check(dp, lbp); dp 42 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 48 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 57 fs/xfs/libxfs/xfs_dir2_node.c return xfs_dir3_leaf_check_int(dp->i_mount, dp, &leafhdr, leaf); dp 62 fs/xfs/libxfs/xfs_dir2_node.c 
struct xfs_inode *dp, dp 67 fs/xfs/libxfs/xfs_dir2_node.c fa = xfs_dir3_leafn_check(dp, bp); dp 70 fs/xfs/libxfs/xfs_dir2_node.c xfs_corruption_error(__func__, XFS_ERRLEVEL_LOW, dp->i_mount, dp 76 fs/xfs/libxfs/xfs_dir2_node.c #define xfs_dir3_leaf_check(dp, bp) dp 158 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 162 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_mount *mp = dp->i_mount; dp 166 fs/xfs/libxfs/xfs_dir2_node.c maxbests = dp->d_ops->free_max_bests(mp->m_dir_geo); dp 195 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 203 fs/xfs/libxfs/xfs_dir2_node.c err = xfs_da_read_buf(tp, dp, fbno, mappedbno, bpp, dp 209 fs/xfs/libxfs/xfs_dir2_node.c fa = xfs_dir3_free_header_check(dp, fbno, *bpp); dp 226 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 230 fs/xfs/libxfs/xfs_dir2_node.c return __xfs_dir3_free_read(tp, dp, fbno, -1, bpp); dp 236 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 240 fs/xfs/libxfs/xfs_dir2_node.c return __xfs_dir3_free_read(tp, dp, fbno, -2, bpp); dp 250 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = args->dp; dp 251 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_mount *mp = dp->i_mount; dp 256 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_da_get_buf(tp, dp, xfs_dir2_db_to_da(args->geo, fbno), dp 277 fs/xfs/libxfs/xfs_dir2_node.c hdr3->hdr.owner = cpu_to_be64(dp->i_ino); dp 281 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_to_disk(bp->b_addr, &hdr); dp 300 fs/xfs/libxfs/xfs_dir2_node.c bests = args->dp->d_ops->free_bests_p(free); dp 325 fs/xfs/libxfs/xfs_dir2_node.c args->dp->d_ops->free_hdr_size - 1); dp 338 fs/xfs/libxfs/xfs_dir2_node.c xfs_inode_t *dp; /* incore directory inode */ dp 355 fs/xfs/libxfs/xfs_dir2_node.c dp = args->dp; dp 372 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 376 fs/xfs/libxfs/xfs_dir2_node.c (uint)dp->i_d.di_size / args->geo->blksize) dp 384 fs/xfs/libxfs/xfs_dir2_node.c to = dp->d_ops->free_bests_p(free); dp 397 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_to_disk(fbp->b_addr, &freehdr); dp 414 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, lbp); dp 429 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = args->dp; dp 441 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 442 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 458 fs/xfs/libxfs/xfs_dir2_node.c if (leafhdr.count == dp->d_ops->leaf_max_ents(args->geo)) { dp 496 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_to_disk(leaf, &leafhdr); dp 499 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, bp); dp 506 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 512 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&hdr, bp->b_addr); dp 515 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_max_bests(dp->i_mount->m_dir_geo)) == 0); dp 520 fs/xfs/libxfs/xfs_dir2_node.c #define xfs_dir2_free_hdr_check(dp, bp, db) dp 529 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 537 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 549 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 567 fs/xfs/libxfs/xfs_dir2_node.c xfs_inode_t *dp; /* incore directory inode */ dp 582 fs/xfs/libxfs/xfs_dir2_node.c dp = args->dp; dp 584 fs/xfs/libxfs/xfs_dir2_node.c mp = dp->i_mount; dp 586 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 587 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 589 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, bp); dp 607 
fs/xfs/libxfs/xfs_dir2_node.c length = dp->d_ops->data_entsize(args->namelen); dp 640 fs/xfs/libxfs/xfs_dir2_node.c newfdb = dp->d_ops->db_to_fdb(args->geo, newdb); dp 651 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir2_free_read(tp, dp, dp 659 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir2_free_hdr_check(dp, curbp, curdb); dp 664 fs/xfs/libxfs/xfs_dir2_node.c fi = dp->d_ops->db_to_fdindex(args->geo, curdb); dp 668 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 722 fs/xfs/libxfs/xfs_dir2_node.c xfs_inode_t *dp; /* incore directory inode */ dp 734 fs/xfs/libxfs/xfs_dir2_node.c dp = args->dp; dp 736 fs/xfs/libxfs/xfs_dir2_node.c mp = dp->i_mount; dp 738 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 739 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 741 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, bp); dp 795 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir3_data_read(tp, dp, dp 802 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_data_check(dp, curbp); dp 824 fs/xfs/libxfs/xfs_dir2_node.c args->filetype = dp->d_ops->data_get_ftype(dep); dp 963 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp, dp 974 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&hdr1, leaf1); dp 975 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&hdr2, leaf2); dp 976 fs/xfs/libxfs/xfs_dir2_node.c ents1 = dp->d_ops->leaf_ents_p(leaf1); dp 977 fs/xfs/libxfs/xfs_dir2_node.c ents2 = dp->d_ops->leaf_ents_p(leaf2); dp 1015 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = state->args->dp; dp 1021 fs/xfs/libxfs/xfs_dir2_node.c swap_blocks = xfs_dir2_leafn_order(dp, blk1->bp, blk2->bp); dp 1027 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&hdr1, leaf1); dp 1028 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&hdr2, leaf2); dp 1029 fs/xfs/libxfs/xfs_dir2_node.c ents1 = dp->d_ops->leaf_ents_p(leaf1); dp 1030 fs/xfs/libxfs/xfs_dir2_node.c ents2 = dp->d_ops->leaf_ents_p(leaf2); dp 1076 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_to_disk(leaf1, &hdr1); dp 1077 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_to_disk(leaf2, &hdr2); dp 1081 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, blk1->bp); dp 1082 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, blk2->bp); dp 1106 fs/xfs/libxfs/xfs_dir2_node.c xfs_alert(dp->i_mount, dp 1125 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = args->dp; dp 1127 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1128 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 1162 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_to_disk(free, &freehdr); dp 1208 fs/xfs/libxfs/xfs_dir2_node.c xfs_inode_t *dp; /* incore directory inode */ dp 1222 fs/xfs/libxfs/xfs_dir2_node.c dp = args->dp; dp 1225 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 1226 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1246 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_to_disk(leaf, &leafhdr); dp 1259 fs/xfs/libxfs/xfs_dir2_node.c bf = dp->d_ops->data_bestfree_p(hdr); dp 1263 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->data_entsize(dep->namelen), &needlog, &needscan); dp 1269 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 1272 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_data_check(dp, dbp); dp 1288 fs/xfs/libxfs/xfs_dir2_node.c fdb = dp->d_ops->db_to_fdb(args->geo, db); dp 1289 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir2_free_read(tp, dp, dp 1298 
fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1299 fs/xfs/libxfs/xfs_dir2_node.c ASSERT(freehdr.firstdb == dp->d_ops->free_max_bests(args->geo) * dp 1307 fs/xfs/libxfs/xfs_dir2_node.c findex = dp->d_ops->db_to_fdindex(args->geo, db); dp 1314 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->data_entry_offset) { dp 1341 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, bp); dp 1346 fs/xfs/libxfs/xfs_dir2_node.c *rval = (dp->d_ops->leaf_hdr_size + dp 1364 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp; dp 1370 fs/xfs/libxfs/xfs_dir2_node.c dp = args->dp; dp 1405 fs/xfs/libxfs/xfs_dir2_node.c oldblk->hashval = xfs_dir2_leaf_lasthash(dp, oldblk->bp, NULL); dp 1406 fs/xfs/libxfs/xfs_dir2_node.c newblk->hashval = xfs_dir2_leaf_lasthash(dp, newblk->bp, NULL); dp 1407 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, oldblk->bp); dp 1408 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, newblk->bp); dp 1438 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = state->args->dp; dp 1447 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&leafhdr, leaf); dp 1448 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1449 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, blk->bp); dp 1452 fs/xfs/libxfs/xfs_dir2_node.c bytes = dp->d_ops->leaf_hdr_size + count * sizeof(ents[0]); dp 1497 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir3_leafn_read(state->args->trans, dp, dp 1510 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&hdr2, leaf); dp 1511 fs/xfs/libxfs/xfs_dir2_node.c ents = dp->d_ops->leaf_ents_p(leaf); dp 1565 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = state->args->dp; dp 1573 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&savehdr, save_leaf); dp 1574 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_from_disk(&drophdr, drop_leaf); dp 1575 fs/xfs/libxfs/xfs_dir2_node.c sents = dp->d_ops->leaf_ents_p(save_leaf); dp 1576 fs/xfs/libxfs/xfs_dir2_node.c dents = dp->d_ops->leaf_ents_p(drop_leaf); dp 1591 fs/xfs/libxfs/xfs_dir2_node.c if (xfs_dir2_leafn_order(dp, save_blk->bp, drop_blk->bp)) dp 1602 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_to_disk(save_leaf, &savehdr); dp 1603 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->leaf_hdr_to_disk(drop_leaf, &drophdr); dp 1607 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, save_blk->bp); dp 1608 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir3_leaf_check(dp, drop_blk->bp); dp 1624 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = args->dp; dp 1626 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_mount *mp = dp->i_mount; dp 1653 fs/xfs/libxfs/xfs_dir2_node.c fbno = dp->d_ops->db_to_fdb(args->geo, *dbno); dp 1654 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir2_free_try_read(tp, dp, dp 1668 fs/xfs/libxfs/xfs_dir2_node.c if (dp->d_ops->db_to_fdb(args->geo, *dbno) != fbno) { dp 1671 fs/xfs/libxfs/xfs_dir2_node.c __func__, (unsigned long long)dp->i_ino, dp 1672 fs/xfs/libxfs/xfs_dir2_node.c (long long)dp->d_ops->db_to_fdb(args->geo, *dbno), dp 1691 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 1692 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1697 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_max_bests(args->geo); dp 1700 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 1701 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1705 fs/xfs/libxfs/xfs_dir2_node.c *findex = dp->d_ops->db_to_fdindex(args->geo, *dbno); dp 1709 fs/xfs/libxfs/xfs_dir2_node.c 
ASSERT(*findex < dp->d_ops->free_max_bests(args->geo)); dp 1720 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_to_disk(fbp->b_addr, &freehdr); dp 1726 fs/xfs/libxfs/xfs_dir2_node.c bf = dp->d_ops->data_bestfree_p(hdr); dp 1745 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = args->dp; dp 1769 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 1770 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1793 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_bmap_last_offset(dp, &fo, XFS_DATA_FORK); dp 1809 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir2_free_try_read(tp, dp, dp 1818 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 1819 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 1858 fs/xfs/libxfs/xfs_dir2_node.c struct xfs_inode *dp = args->dp; dp 1872 fs/xfs/libxfs/xfs_dir2_node.c length = dp->d_ops->data_entsize(args->namelen); dp 1899 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir3_data_read(tp, dp, dp 1908 fs/xfs/libxfs/xfs_dir2_node.c bf = dp->d_ops->data_bestfree_p(hdr); dp 1929 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->data_put_ftype(dep, args->filetype); dp 1930 fs/xfs/libxfs/xfs_dir2_node.c tagp = dp->d_ops->data_entry_tag_p(dep); dp 1936 fs/xfs/libxfs/xfs_dir2_node.c xfs_dir2_data_freescan(dp, hdr, &needlog); dp 1942 fs/xfs/libxfs/xfs_dir2_node.c bests = dp->d_ops->free_bests_p(free); dp 1977 fs/xfs/libxfs/xfs_dir2_node.c state->mp = args->dp->i_mount; dp 2048 fs/xfs/libxfs/xfs_dir2_node.c state->mp = args->dp->i_mount; dp 2101 fs/xfs/libxfs/xfs_dir2_node.c state->mp = args->dp->i_mount; dp 2170 fs/xfs/libxfs/xfs_dir2_node.c state->mp = args->dp->i_mount; dp 2198 fs/xfs/libxfs/xfs_dir2_node.c ents = args->dp->d_ops->leaf_ents_p(leaf); dp 2216 fs/xfs/libxfs/xfs_dir2_node.c args->dp->d_ops->data_put_ftype(dep, ftype); dp 2249 fs/xfs/libxfs/xfs_dir2_node.c xfs_inode_t *dp; /* incore directory inode */ dp 2255 fs/xfs/libxfs/xfs_dir2_node.c dp = args->dp; dp 2263 fs/xfs/libxfs/xfs_dir2_node.c error = xfs_dir2_free_try_read(tp, dp, fo, &bp); dp 2273 fs/xfs/libxfs/xfs_dir2_node.c dp->d_ops->free_hdr_from_disk(&freehdr, free); dp 19 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir3_block_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 30 fs/xfs/libxfs/xfs_dir2_priv.h extern void xfs_dir3_data_check(struct xfs_inode *dp, struct xfs_buf *bp); dp 32 fs/xfs/libxfs/xfs_dir2_priv.h #define xfs_dir3_data_check(dp,bp) dp 35 fs/xfs/libxfs/xfs_dir2_priv.h extern xfs_failaddr_t __xfs_dir3_data_check(struct xfs_inode *dp, dp 37 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir3_data_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 39 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir3_data_readahead(struct xfs_inode *dp, xfs_dablk_t bno, dp 50 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir3_leaf_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 52 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir3_leafn_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 82 fs/xfs/libxfs/xfs_dir2_priv.h struct xfs_inode *dp, struct xfs_dir3_icleaf_hdr *hdr, dp 88 fs/xfs/libxfs/xfs_dir2_priv.h extern xfs_dahash_t xfs_dir2_leaf_lasthash(struct xfs_inode *dp, dp 93 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir2_leafn_order(struct xfs_inode *dp, struct xfs_buf *leaf1_bp, dp 107 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir2_free_read(struct xfs_trans *tp, struct xfs_inode *dp, dp 111 fs/xfs/libxfs/xfs_dir2_priv.h extern int xfs_dir2_block_sfsize(struct xfs_inode *dp, dp 123 fs/xfs/libxfs/xfs_dir2_priv.h 
extern int xfs_readdir(struct xfs_trans *tp, struct xfs_inode *dp, dp 48 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp, /* incore inode pointer */ dp 68 fs/xfs/libxfs/xfs_dir2_sf.c mp = dp->i_mount; dp 120 fs/xfs/libxfs/xfs_dir2_sf.c if (size > XFS_IFORK_DSIZE(dp)) dp 128 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_parent_ino(sfhp, parent); dp 145 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 158 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 159 fs/xfs/libxfs/xfs_dir2_sf.c mp = dp->i_mount; dp 179 fs/xfs/libxfs/xfs_dir2_sf.c ptr = (char *)dp->d_ops->data_entry_p(hdr); dp 200 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(be64_to_cpu(dep->inumber) == dp->i_ino); dp 207 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_get_parent_ino(sfp)); dp 217 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ino(sfp, sfep, dp 219 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ftype(sfep, dp 220 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->data_get_ftype(dep)); dp 222 fs/xfs/libxfs/xfs_dir2_sf.c sfep = dp->d_ops->sf_nextentry(sfp, sfep); dp 224 fs/xfs/libxfs/xfs_dir2_sf.c ptr += dp->d_ops->data_entsize(dep->namelen); dp 242 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_bytes == 0); dp 243 fs/xfs/libxfs/xfs_dir2_sf.c xfs_init_local_fork(dp, XFS_DATA_FORK, dst, size); dp 244 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_format = XFS_DINODE_FMT_LOCAL; dp 245 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = size; dp 250 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, logflags); dp 265 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 278 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 279 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_flags & XFS_IFINLINE); dp 283 fs/xfs/libxfs/xfs_dir2_sf.c if (dp->i_d.di_size < offsetof(xfs_dir2_sf_hdr_t, parent)) { dp 284 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(XFS_FORCED_SHUTDOWN(dp->i_mount)); dp 287 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_bytes == dp->i_d.di_size); dp 288 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_u1.if_data != NULL); dp 289 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 290 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_d.di_size >= xfs_dir2_sf_hdr_size(sfp->i8count)); dp 294 fs/xfs/libxfs/xfs_dir2_sf.c incr_isize = dp->d_ops->sf_entsize(sfp, args->namelen); dp 308 fs/xfs/libxfs/xfs_dir2_sf.c new_isize = (int)dp->i_d.di_size + incr_isize; dp 313 fs/xfs/libxfs/xfs_dir2_sf.c if (new_isize > XFS_IFORK_DSIZE(dp) || dp 349 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_DDATA); dp 368 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 371 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 373 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 378 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, dp->d_ops->sf_entsize(sfp, args->namelen), dp 383 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 391 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ino(sfp, sfep, args->inumber); dp 392 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ftype(sfep, args->filetype); dp 400 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = new_isize; dp 421 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 435 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 437 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 438 fs/xfs/libxfs/xfs_dir2_sf.c old_isize = (int)dp->i_d.di_size; dp 447 fs/xfs/libxfs/xfs_dir2_sf.c for (offset = 
dp->d_ops->data_first_offset, dp 449 fs/xfs/libxfs/xfs_dir2_sf.c add_datasize = dp->d_ops->data_entsize(args->namelen), dp 452 fs/xfs/libxfs/xfs_dir2_sf.c offset = new_offset + dp->d_ops->data_entsize(oldsfep->namelen), dp 453 fs/xfs/libxfs/xfs_dir2_sf.c oldsfep = dp->d_ops->sf_nextentry(oldsfp, oldsfep), dp 464 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, -old_isize, XFS_DATA_FORK); dp 465 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, new_isize, XFS_DATA_FORK); dp 469 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 482 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ino(sfp, sfep, args->inumber); dp 483 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ftype(sfep, args->filetype); dp 491 fs/xfs/libxfs/xfs_dir2_sf.c sfep = dp->d_ops->sf_nextentry(sfp, sfep); dp 495 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = new_isize; dp 513 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 522 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 524 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 525 fs/xfs/libxfs/xfs_dir2_sf.c size = dp->d_ops->data_entsize(args->namelen); dp 526 fs/xfs/libxfs/xfs_dir2_sf.c offset = dp->d_ops->data_first_offset; dp 538 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->data_entsize(sfep->namelen); dp 539 fs/xfs/libxfs/xfs_dir2_sf.c sfep = dp->d_ops->sf_nextentry(sfp, sfep); dp 581 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 589 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 591 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 592 fs/xfs/libxfs/xfs_dir2_sf.c offset = dp->d_ops->data_first_offset; dp 593 fs/xfs/libxfs/xfs_dir2_sf.c ino = dp->d_ops->sf_get_parent_ino(sfp); dp 598 fs/xfs/libxfs/xfs_dir2_sf.c i++, sfep = dp->d_ops->sf_nextentry(sfp, sfep)) { dp 600 fs/xfs/libxfs/xfs_dir2_sf.c ino = dp->d_ops->sf_get_ino(sfp, sfep); dp 604 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->data_entsize(sfep->namelen); dp 605 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->d_ops->sf_get_ftype(sfep) < XFS_DIR3_FT_MAX); dp 608 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT((char *)sfep - (char *)sfp == dp->i_d.di_size); dp 729 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 736 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 738 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp != NULL); dp 739 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_d.di_size == 0); dp 744 fs/xfs/libxfs/xfs_dir2_sf.c if (dp->i_d.di_format == XFS_DINODE_FMT_EXTENTS) { dp 745 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_df.if_flags &= ~XFS_IFEXTENTS; /* just in case */ dp 746 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_format = XFS_DINODE_FMT_LOCAL; dp 747 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE); dp 748 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_df.if_flags |= XFS_IFINLINE; dp 750 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_flags & XFS_IFINLINE); dp 751 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_bytes == 0); dp 757 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, size, XFS_DATA_FORK); dp 761 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 766 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_parent_ino(sfp, pino); dp 768 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = size; dp 770 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_DDATA); dp 782 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 793 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 795 
fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_flags & XFS_IFINLINE); dp 799 fs/xfs/libxfs/xfs_dir2_sf.c if (dp->i_d.di_size < offsetof(xfs_dir2_sf_hdr_t, parent)) { dp 800 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(XFS_FORCED_SHUTDOWN(dp->i_mount)); dp 803 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_bytes == dp->i_d.di_size); dp 804 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_u1.if_data != NULL); dp 805 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 806 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_d.di_size >= xfs_dir2_sf_hdr_size(sfp->i8count)); dp 811 fs/xfs/libxfs/xfs_dir2_sf.c args->inumber = dp->i_ino; dp 821 fs/xfs/libxfs/xfs_dir2_sf.c args->inumber = dp->d_ops->sf_get_parent_ino(sfp); dp 831 fs/xfs/libxfs/xfs_dir2_sf.c i++, sfep = dp->d_ops->sf_nextentry(sfp, sfep)) { dp 837 fs/xfs/libxfs/xfs_dir2_sf.c cmp = dp->i_mount->m_dirnameops->compname(args, sfep->name, dp 841 fs/xfs/libxfs/xfs_dir2_sf.c args->inumber = dp->d_ops->sf_get_ino(sfp, sfep); dp 842 fs/xfs/libxfs/xfs_dir2_sf.c args->filetype = dp->d_ops->sf_get_ftype(sfep); dp 868 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 878 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 880 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_flags & XFS_IFINLINE); dp 881 fs/xfs/libxfs/xfs_dir2_sf.c oldsize = (int)dp->i_d.di_size; dp 886 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(XFS_FORCED_SHUTDOWN(dp->i_mount)); dp 889 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_bytes == oldsize); dp 890 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_u1.if_data != NULL); dp 891 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 898 fs/xfs/libxfs/xfs_dir2_sf.c i++, sfep = dp->d_ops->sf_nextentry(sfp, sfep)) { dp 901 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->d_ops->sf_get_ino(sfp, sfep) == dp 915 fs/xfs/libxfs/xfs_dir2_sf.c entsize = dp->d_ops->sf_entsize(sfp, args->namelen); dp 927 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = newsize; dp 931 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, newsize - oldsize, XFS_DATA_FORK); dp 932 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 943 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_DDATA); dp 954 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 963 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 965 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_flags & XFS_IFINLINE); dp 969 fs/xfs/libxfs/xfs_dir2_sf.c if (dp->i_d.di_size < offsetof(xfs_dir2_sf_hdr_t, parent)) { dp 970 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(XFS_FORCED_SHUTDOWN(dp->i_mount)); dp 973 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_bytes == dp->i_d.di_size); dp 974 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_df.if_u1.if_data != NULL); dp 975 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 976 fs/xfs/libxfs/xfs_dir2_sf.c ASSERT(dp->i_d.di_size >= xfs_dir2_sf_hdr_size(sfp->i8count)); dp 985 fs/xfs/libxfs/xfs_dir2_sf.c newsize = dp->i_df.if_bytes + (sfp->count + 1) * XFS_INO64_DIFF; dp 989 fs/xfs/libxfs/xfs_dir2_sf.c if (newsize > XFS_IFORK_DSIZE(dp)) { dp 1001 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 1011 fs/xfs/libxfs/xfs_dir2_sf.c ino = dp->d_ops->sf_get_parent_ino(sfp); dp 1013 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_parent_ino(sfp, args->inumber); dp 1020 fs/xfs/libxfs/xfs_dir2_sf.c i++, sfep = dp->d_ops->sf_nextentry(sfp, sfep)) { dp 1023 fs/xfs/libxfs/xfs_dir2_sf.c ino = 
dp->d_ops->sf_get_ino(sfp, sfep); dp 1025 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ino(sfp, sfep, args->inumber); dp 1026 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ftype(sfep, args->filetype); dp 1067 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_DDATA); dp 1080 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 1091 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 1098 fs/xfs/libxfs/xfs_dir2_sf.c oldsize = dp->i_df.if_bytes; dp 1100 fs/xfs/libxfs/xfs_dir2_sf.c oldsfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 1107 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, -oldsize, XFS_DATA_FORK); dp 1108 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, newsize, XFS_DATA_FORK); dp 1113 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 1119 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_parent_ino(sfp, dp->d_ops->sf_get_parent_ino(oldsfp)); dp 1126 fs/xfs/libxfs/xfs_dir2_sf.c i++, sfep = dp->d_ops->sf_nextentry(sfp, sfep), dp 1127 fs/xfs/libxfs/xfs_dir2_sf.c oldsfep = dp->d_ops->sf_nextentry(oldsfp, oldsfep)) { dp 1131 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ino(sfp, sfep, dp 1132 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_get_ino(oldsfp, oldsfep)); dp 1133 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ftype(sfep, dp->d_ops->sf_get_ftype(oldsfep)); dp 1139 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = newsize; dp 1140 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_DDATA); dp 1153 fs/xfs/libxfs/xfs_dir2_sf.c xfs_inode_t *dp; /* incore directory inode */ dp 1164 fs/xfs/libxfs/xfs_dir2_sf.c dp = args->dp; dp 1171 fs/xfs/libxfs/xfs_dir2_sf.c oldsize = dp->i_df.if_bytes; dp 1173 fs/xfs/libxfs/xfs_dir2_sf.c oldsfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 1180 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, -oldsize, XFS_DATA_FORK); dp 1181 fs/xfs/libxfs/xfs_dir2_sf.c xfs_idata_realloc(dp, newsize, XFS_DATA_FORK); dp 1186 fs/xfs/libxfs/xfs_dir2_sf.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 1192 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_parent_ino(sfp, dp->d_ops->sf_get_parent_ino(oldsfp)); dp 1199 fs/xfs/libxfs/xfs_dir2_sf.c i++, sfep = dp->d_ops->sf_nextentry(sfp, sfep), dp 1200 fs/xfs/libxfs/xfs_dir2_sf.c oldsfep = dp->d_ops->sf_nextentry(oldsfp, oldsfep)) { dp 1204 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ino(sfp, sfep, dp 1205 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_get_ino(oldsfp, oldsfep)); dp 1206 fs/xfs/libxfs/xfs_dir2_sf.c dp->d_ops->sf_put_ftype(sfep, dp->d_ops->sf_get_ftype(oldsfep)); dp 1212 fs/xfs/libxfs/xfs_dir2_sf.c dp->i_d.di_size = newsize; dp 1213 fs/xfs/libxfs/xfs_dir2_sf.c xfs_trans_log_inode(args->trans, dp, XFS_ILOG_CORE | XFS_ILOG_DDATA); dp 209 fs/xfs/libxfs/xfs_inode_fork.c struct xfs_bmbt_rec *dp; dp 230 fs/xfs/libxfs/xfs_inode_fork.c dp = (xfs_bmbt_rec_t *) XFS_DFORK_PTR(dip, whichfork); dp 233 fs/xfs/libxfs/xfs_inode_fork.c for (i = 0; i < nex; i++, dp++) { dp 236 fs/xfs/libxfs/xfs_inode_fork.c xfs_bmbt_disk_get_all(dp, &new); dp 241 fs/xfs/libxfs/xfs_inode_fork.c dp, sizeof(*dp), fa); dp 548 fs/xfs/libxfs/xfs_inode_fork.c struct xfs_bmbt_rec *dp, dp 564 fs/xfs/libxfs/xfs_inode_fork.c xfs_bmbt_disk_set_all(dp, &rec); dp 567 fs/xfs/libxfs/xfs_inode_fork.c dp++; dp 127 fs/xfs/scrub/attr.c xchk_ino_set_preen(sx->sc, context->dp->i_ino); dp 155 fs/xfs/scrub/attr.c args.geo = context->dp->i_mount->m_attr_geo; dp 157 fs/xfs/scrub/attr.c args.dp = context->dp; dp 165 fs/xfs/scrub/attr.c error = 
xfs_attr_get_ilocked(context->dp, &args); dp 494 fs/xfs/scrub/attr.c sx.context.dp = sc->ip; dp 101 fs/xfs/scrub/dabtree.c ents = (char *)ds->dargs.dp->d_ops->leaf_ents_p(baddr); dp 105 fs/xfs/scrub/dabtree.c ents = (char *)ds->dargs.dp->d_ops->leaf_ents_p(baddr); dp 109 fs/xfs/scrub/dabtree.c ents = (char *)ds->dargs.dp->d_ops->node_tree_p(baddr); dp 337 fs/xfs/scrub/dabtree.c struct xfs_inode *ip = ds->dargs.dp; dp 358 fs/xfs/scrub/dabtree.c error = xfs_da_read_buf(dargs->trans, dargs->dp, blk->blkno, -2, dp 493 fs/xfs/scrub/dabtree.c ds.dargs.dp = sc->ip; dp 187 fs/xfs/scrub/dir.c struct xfs_inode *dp = ds->dargs.dp; dp 220 fs/xfs/scrub/dir.c error = xfs_dir3_data_read(ds->dargs.trans, dp, rec_bno, -2, &bp); dp 264 fs/xfs/scrub/dir.c tag = be16_to_cpup(dp->d_ops->data_entry_tag_p(dent)); dp 651 fs/xfs/scrub/dir.c args.dp = sc->ip; dp 121 fs/xfs/scrub/parent.c struct xfs_inode *dp = NULL; dp 157 fs/xfs/scrub/parent.c error = xfs_iget(mp, sc->tp, dnum, XFS_IGET_UNTRUSTED, 0, &dp); dp 165 fs/xfs/scrub/parent.c if (dp == sc->ip || !S_ISDIR(VFS_I(dp)->i_mode)) { dp 177 fs/xfs/scrub/parent.c if (xfs_ilock_nowait(dp, XFS_IOLOCK_SHARED)) { dp 178 fs/xfs/scrub/parent.c error = xchk_parent_count_parent_dentries(sc, dp, &nlink); dp 195 fs/xfs/scrub/parent.c error = xchk_ilock_inverted(dp, XFS_IOLOCK_SHARED); dp 200 fs/xfs/scrub/parent.c error = xchk_parent_count_parent_dentries(sc, dp, &nlink); dp 205 fs/xfs/scrub/parent.c xfs_iunlock(dp, XFS_IOLOCK_SHARED); dp 224 fs/xfs/scrub/parent.c if (dnum != dp->i_ino) { dp 225 fs/xfs/scrub/parent.c xfs_irele(dp); dp 229 fs/xfs/scrub/parent.c xfs_irele(dp); dp 240 fs/xfs/scrub/parent.c xfs_iunlock(dp, XFS_IOLOCK_SHARED); dp 242 fs/xfs/scrub/parent.c xfs_irele(dp); dp 33 fs/xfs/xfs_attr_inactive.c struct xfs_inode *dp, dp 57 fs/xfs/xfs_attr_inactive.c error = xfs_bmapi_read(dp, (xfs_fileoff_t)tblkno, tblkcnt, dp 71 fs/xfs/xfs_attr_inactive.c dblkno = XFS_FSB_TO_DADDR(dp->i_mount, dp 73 fs/xfs/xfs_attr_inactive.c dblkcnt = XFS_FSB_TO_BB(dp->i_mount, dp 76 fs/xfs/xfs_attr_inactive.c dp->i_mount->m_ddev_targp, dp 84 fs/xfs/xfs_attr_inactive.c error = xfs_trans_roll_inode(trans, dp); dp 105 fs/xfs/xfs_attr_inactive.c struct xfs_inode *dp, dp 163 fs/xfs/xfs_attr_inactive.c lp->valuelen = xfs_attr3_rmt_blocks(dp->i_mount, dp 176 fs/xfs/xfs_attr_inactive.c tmp = xfs_attr3_leaf_freextent(trans, dp, dp 194 fs/xfs/xfs_attr_inactive.c struct xfs_inode *dp, dp 216 fs/xfs/xfs_attr_inactive.c dp->d_ops->node_hdr_from_disk(&ichdr, node); dp 222 fs/xfs/xfs_attr_inactive.c btree = dp->d_ops->node_tree_p(node); dp 238 fs/xfs/xfs_attr_inactive.c error = xfs_da3_node_read(*trans, dp, child_fsb, -1, &child_bp, dp 253 fs/xfs/xfs_attr_inactive.c error = xfs_attr3_node_inactive(trans, dp, child_bp, dp 258 fs/xfs/xfs_attr_inactive.c error = xfs_attr3_leaf_inactive(trans, dp, child_bp); dp 271 fs/xfs/xfs_attr_inactive.c error = xfs_da_get_buf(*trans, dp, 0, child_blkno, &child_bp, dp 282 fs/xfs/xfs_attr_inactive.c error = xfs_da3_node_read(*trans, dp, 0, parent_blkno, dp 287 fs/xfs/xfs_attr_inactive.c btree = dp->d_ops->node_tree_p(node); dp 294 fs/xfs/xfs_attr_inactive.c error = xfs_trans_roll_inode(trans, dp); dp 311 fs/xfs/xfs_attr_inactive.c struct xfs_inode *dp) dp 324 fs/xfs/xfs_attr_inactive.c error = xfs_da3_node_read(*trans, dp, 0, -1, &bp, XFS_ATTR_FORK); dp 337 fs/xfs/xfs_attr_inactive.c error = xfs_attr3_node_inactive(trans, dp, bp, 1); dp 341 fs/xfs/xfs_attr_inactive.c error = xfs_attr3_leaf_inactive(trans, dp, bp); dp 354 fs/xfs/xfs_attr_inactive.c error = 
xfs_da_get_buf(*trans, dp, 0, blkno, &bp, XFS_ATTR_FORK); dp 361 fs/xfs/xfs_attr_inactive.c error = xfs_trans_roll_inode(trans, dp); dp 376 fs/xfs/xfs_attr_inactive.c struct xfs_inode *dp) dp 383 fs/xfs/xfs_attr_inactive.c mp = dp->i_mount; dp 384 fs/xfs/xfs_attr_inactive.c ASSERT(! XFS_NOT_DQATTACHED(mp, dp)); dp 386 fs/xfs/xfs_attr_inactive.c xfs_ilock(dp, lock_mode); dp 387 fs/xfs/xfs_attr_inactive.c if (!XFS_IFORK_Q(dp)) dp 389 fs/xfs/xfs_attr_inactive.c xfs_iunlock(dp, lock_mode); dp 398 fs/xfs/xfs_attr_inactive.c xfs_ilock(dp, lock_mode); dp 400 fs/xfs/xfs_attr_inactive.c if (!XFS_IFORK_Q(dp)) dp 407 fs/xfs/xfs_attr_inactive.c xfs_trans_ijoin(trans, dp, 0); dp 415 fs/xfs/xfs_attr_inactive.c if (xfs_inode_hasattr(dp) && dp 416 fs/xfs/xfs_attr_inactive.c dp->i_d.di_aformat != XFS_DINODE_FMT_LOCAL) { dp 417 fs/xfs/xfs_attr_inactive.c error = xfs_attr3_root_inactive(&trans, dp); dp 421 fs/xfs/xfs_attr_inactive.c error = xfs_itruncate_extents(&trans, dp, XFS_ATTR_FORK, 0); dp 427 fs/xfs/xfs_attr_inactive.c xfs_attr_fork_remove(dp, trans); dp 430 fs/xfs/xfs_attr_inactive.c xfs_iunlock(dp, lock_mode); dp 437 fs/xfs/xfs_attr_inactive.c if (dp->i_afp) dp 438 fs/xfs/xfs_attr_inactive.c xfs_idestroy_fork(dp, XFS_ATTR_FORK); dp 440 fs/xfs/xfs_attr_inactive.c xfs_iunlock(dp, lock_mode); dp 58 fs/xfs/xfs_attr_list.c xfs_inode_t *dp; dp 62 fs/xfs/xfs_attr_list.c dp = context->dp; dp 63 fs/xfs/xfs_attr_list.c ASSERT(dp != NULL); dp 64 fs/xfs/xfs_attr_list.c ASSERT(dp->i_afp != NULL); dp 65 fs/xfs/xfs_attr_list.c sf = (xfs_attr_shortform_t *)dp->i_afp->if_u1.if_data; dp 85 fs/xfs/xfs_attr_list.c (dp->i_afp->if_bytes + sf->hdr.count * 16) < context->bufsize)) { dp 122 fs/xfs/xfs_attr_list.c ((char *)sfe >= ((char *)sf + dp->i_afp->if_bytes)))) { dp 125 fs/xfs/xfs_attr_list.c context->dp->i_mount, sfe, dp 204 fs/xfs/xfs_attr_list.c struct xfs_inode *dp = context->dp; dp 205 fs/xfs/xfs_attr_list.c struct xfs_mount *mp = dp->i_mount; dp 216 fs/xfs/xfs_attr_list.c error = xfs_da3_node_read(tp, dp, cursor->blkno, -1, &bp, dp 232 fs/xfs/xfs_attr_list.c dp->d_ops->node_hdr_from_disk(&nodehdr, node); dp 246 fs/xfs/xfs_attr_list.c btree = dp->d_ops->node_tree_p(node); dp 285 fs/xfs/xfs_attr_list.c struct xfs_inode *dp = context->dp; dp 286 fs/xfs/xfs_attr_list.c struct xfs_mount *mp = dp->i_mount; dp 301 fs/xfs/xfs_attr_list.c error = xfs_da3_node_read(context->tp, dp, cursor->blkno, -1, dp 367 fs/xfs/xfs_attr_list.c error = xfs_attr3_leaf_read(context->tp, dp, cursor->blkno, -1, &bp); dp 389 fs/xfs/xfs_attr_list.c struct xfs_mount *mp = context->dp->i_mount; dp 482 fs/xfs/xfs_attr_list.c error = xfs_attr3_leaf_read(context->tp, context->dp, 0, -1, &bp); dp 495 fs/xfs/xfs_attr_list.c struct xfs_inode *dp = context->dp; dp 497 fs/xfs/xfs_attr_list.c ASSERT(xfs_isilocked(dp, XFS_ILOCK_SHARED | XFS_ILOCK_EXCL)); dp 502 fs/xfs/xfs_attr_list.c if (!xfs_inode_hasattr(dp)) dp 504 fs/xfs/xfs_attr_list.c else if (dp->i_d.di_aformat == XFS_DINODE_FMT_LOCAL) dp 506 fs/xfs/xfs_attr_list.c else if (xfs_bmap_one_block(dp, XFS_ATTR_FORK)) dp 516 fs/xfs/xfs_attr_list.c xfs_inode_t *dp = context->dp; dp 519 fs/xfs/xfs_attr_list.c XFS_STATS_INC(dp->i_mount, xs_attr_list); dp 521 fs/xfs/xfs_attr_list.c if (XFS_FORCED_SHUTDOWN(dp->i_mount)) dp 524 fs/xfs/xfs_attr_list.c lock_mode = xfs_ilock_attr_map_shared(dp); dp 526 fs/xfs/xfs_attr_list.c xfs_iunlock(dp, lock_mode); dp 599 fs/xfs/xfs_attr_list.c xfs_inode_t *dp, dp 634 fs/xfs/xfs_attr_list.c context.dp = dp; dp 49 fs/xfs/xfs_dir2_readdir.c struct xfs_inode *dp = args->dp; /* 
incore directory inode */ dp 58 fs/xfs/xfs_dir2_readdir.c ASSERT(dp->i_df.if_flags & XFS_IFINLINE); dp 59 fs/xfs/xfs_dir2_readdir.c ASSERT(dp->i_df.if_bytes == dp->i_d.di_size); dp 60 fs/xfs/xfs_dir2_readdir.c ASSERT(dp->i_df.if_u1.if_data != NULL); dp 62 fs/xfs/xfs_dir2_readdir.c sfp = (xfs_dir2_sf_hdr_t *)dp->i_df.if_u1.if_data; dp 77 fs/xfs/xfs_dir2_readdir.c dp->d_ops->data_dot_offset); dp 79 fs/xfs/xfs_dir2_readdir.c dp->d_ops->data_dotdot_offset); dp 86 fs/xfs/xfs_dir2_readdir.c if (!dir_emit(ctx, ".", 1, dp->i_ino, DT_DIR)) dp 94 fs/xfs/xfs_dir2_readdir.c ino = dp->d_ops->sf_get_parent_ino(sfp); dp 111 fs/xfs/xfs_dir2_readdir.c sfep = dp->d_ops->sf_nextentry(sfp, sfep); dp 115 fs/xfs/xfs_dir2_readdir.c ino = dp->d_ops->sf_get_ino(sfp, sfep); dp 116 fs/xfs/xfs_dir2_readdir.c filetype = dp->d_ops->sf_get_ftype(sfep); dp 119 fs/xfs/xfs_dir2_readdir.c xfs_dir3_get_dtype(dp->i_mount, filetype))) dp 121 fs/xfs/xfs_dir2_readdir.c sfep = dp->d_ops->sf_nextentry(sfp, sfep); dp 137 fs/xfs/xfs_dir2_readdir.c struct xfs_inode *dp = args->dp; /* incore directory inode */ dp 156 fs/xfs/xfs_dir2_readdir.c lock_mode = xfs_ilock_data_map_shared(dp); dp 157 fs/xfs/xfs_dir2_readdir.c error = xfs_dir3_block_read(args->trans, dp, &bp); dp 158 fs/xfs/xfs_dir2_readdir.c xfs_iunlock(dp, lock_mode); dp 168 fs/xfs/xfs_dir2_readdir.c xfs_dir3_data_check(dp, bp); dp 172 fs/xfs/xfs_dir2_readdir.c ptr = (char *)dp->d_ops->data_entry_p(hdr); dp 196 fs/xfs/xfs_dir2_readdir.c ptr += dp->d_ops->data_entsize(dep->namelen); dp 207 fs/xfs/xfs_dir2_readdir.c filetype = dp->d_ops->data_get_ftype(dep); dp 213 fs/xfs/xfs_dir2_readdir.c xfs_dir3_get_dtype(dp->i_mount, filetype))) { dp 242 fs/xfs/xfs_dir2_readdir.c struct xfs_inode *dp = args->dp; dp 245 fs/xfs/xfs_dir2_readdir.c struct xfs_ifork *ifp = XFS_IFORK_PTR(dp, XFS_DATA_FORK); dp 257 fs/xfs/xfs_dir2_readdir.c error = xfs_iread_extents(args->trans, dp, XFS_DATA_FORK); dp 269 fs/xfs/xfs_dir2_readdir.c if (!xfs_iext_lookup_extent(dp, ifp, map_off, &icur, &map)) dp 279 fs/xfs/xfs_dir2_readdir.c error = xfs_dir3_data_read(args->trans, dp, map.br_startoff, -1, &bp); dp 314 fs/xfs/xfs_dir2_readdir.c xfs_dir3_data_readahead(dp, next_ra, -2); dp 345 fs/xfs/xfs_dir2_readdir.c struct xfs_inode *dp = args->dp; dp 389 fs/xfs/xfs_dir2_readdir.c lock_mode = xfs_ilock_data_map_shared(dp); dp 392 fs/xfs/xfs_dir2_readdir.c xfs_iunlock(dp, lock_mode); dp 397 fs/xfs/xfs_dir2_readdir.c xfs_dir3_data_check(dp, bp); dp 401 fs/xfs/xfs_dir2_readdir.c ptr = (char *)dp->d_ops->data_entry_p(hdr); dp 407 fs/xfs/xfs_dir2_readdir.c curoff += dp->d_ops->data_entry_offset; dp 424 fs/xfs/xfs_dir2_readdir.c dp->d_ops->data_entsize(dep->namelen); dp 455 fs/xfs/xfs_dir2_readdir.c length = dp->d_ops->data_entsize(dep->namelen); dp 456 fs/xfs/xfs_dir2_readdir.c filetype = dp->d_ops->data_get_ftype(dep); dp 461 fs/xfs/xfs_dir2_readdir.c xfs_dir3_get_dtype(dp->i_mount, filetype))) dp 496 fs/xfs/xfs_dir2_readdir.c struct xfs_inode *dp, dp 504 fs/xfs/xfs_dir2_readdir.c trace_xfs_readdir(dp); dp 506 fs/xfs/xfs_dir2_readdir.c if (XFS_FORCED_SHUTDOWN(dp->i_mount)) dp 509 fs/xfs/xfs_dir2_readdir.c ASSERT(S_ISDIR(VFS_I(dp)->i_mode)); dp 510 fs/xfs/xfs_dir2_readdir.c XFS_STATS_INC(dp->i_mount, xs_dir_getdents); dp 512 fs/xfs/xfs_dir2_readdir.c args.dp = dp; dp 513 fs/xfs/xfs_dir2_readdir.c args.geo = dp->i_mount->m_dir_geo; dp 516 fs/xfs/xfs_dir2_readdir.c if (dp->i_d.di_format == XFS_DINODE_FMT_LOCAL) dp 680 fs/xfs/xfs_inode.c xfs_inode_t *dp, dp 688 fs/xfs/xfs_inode.c trace_xfs_lookup(dp, name); dp 690 
fs/xfs/xfs_inode.c if (XFS_FORCED_SHUTDOWN(dp->i_mount)) dp 693 fs/xfs/xfs_inode.c error = xfs_dir_lookup(NULL, dp, name, &inum, ci_name); dp 697 fs/xfs/xfs_inode.c error = xfs_iget(dp->i_mount, NULL, inum, 0, 0, ipp); dp 963 fs/xfs/xfs_inode.c xfs_inode_t *dp, /* directory within whose allocate dp 997 fs/xfs/xfs_inode.c code = xfs_ialloc(tp, dp, mode, nlink, rdev, prid, &ialloc_context, dp 1067 fs/xfs/xfs_inode.c code = xfs_ialloc(tp, dp, mode, nlink, rdev, prid, dp 1127 fs/xfs/xfs_inode.c xfs_inode_t *dp, dp 1134 fs/xfs/xfs_inode.c struct xfs_mount *mp = dp->i_mount; dp 1146 fs/xfs/xfs_inode.c trace_xfs_create(dp, name); dp 1151 fs/xfs/xfs_inode.c prid = xfs_get_initial_prid(dp); dp 1156 fs/xfs/xfs_inode.c error = xfs_qm_vop_dqalloc(dp, xfs_kuid_to_uid(current_fsuid()), dp 1186 fs/xfs/xfs_inode.c xfs_ilock(dp, XFS_ILOCK_EXCL | XFS_ILOCK_PARENT); dp 1202 fs/xfs/xfs_inode.c error = xfs_dir_ialloc(&tp, dp, mode, is_dir ? 2 : 1, rdev, prid, &ip); dp 1213 fs/xfs/xfs_inode.c xfs_trans_ijoin(tp, dp, XFS_ILOCK_EXCL); dp 1216 fs/xfs/xfs_inode.c error = xfs_dir_createname(tp, dp, name, ip->i_ino, dp 1223 fs/xfs/xfs_inode.c xfs_trans_ichgtime(tp, dp, XFS_ICHGTIME_MOD | XFS_ICHGTIME_CHG); dp 1224 fs/xfs/xfs_inode.c xfs_trans_log_inode(tp, dp, XFS_ILOG_CORE); dp 1227 fs/xfs/xfs_inode.c error = xfs_dir_init(tp, ip, dp); dp 1231 fs/xfs/xfs_inode.c xfs_bumplink(tp, dp); dp 1278 fs/xfs/xfs_inode.c xfs_iunlock(dp, XFS_ILOCK_EXCL); dp 1284 fs/xfs/xfs_inode.c struct xfs_inode *dp, dp 1288 fs/xfs/xfs_inode.c struct xfs_mount *mp = dp->i_mount; dp 1302 fs/xfs/xfs_inode.c prid = xfs_get_initial_prid(dp); dp 1307 fs/xfs/xfs_inode.c error = xfs_qm_vop_dqalloc(dp, xfs_kuid_to_uid(current_fsuid()), dp 1326 fs/xfs/xfs_inode.c error = xfs_dir_ialloc(&tp, dp, mode, 0, 0, prid, &ip); dp 2859 fs/xfs/xfs_inode.c xfs_inode_t *dp, dp 2863 fs/xfs/xfs_inode.c xfs_mount_t *mp = dp->i_mount; dp 2869 fs/xfs/xfs_inode.c trace_xfs_remove(dp, name); dp 2874 fs/xfs/xfs_inode.c error = xfs_qm_dqattach(dp); dp 2903 fs/xfs/xfs_inode.c xfs_lock_two_inodes(dp, XFS_ILOCK_EXCL, ip, XFS_ILOCK_EXCL); dp 2905 fs/xfs/xfs_inode.c xfs_trans_ijoin(tp, dp, XFS_ILOCK_EXCL); dp 2923 fs/xfs/xfs_inode.c error = xfs_droplink(tp, dp); dp 2937 fs/xfs/xfs_inode.c xfs_trans_log_inode(tp, dp, XFS_ILOG_CORE); dp 2939 fs/xfs/xfs_inode.c xfs_trans_ichgtime(tp, dp, XFS_ICHGTIME_MOD | XFS_ICHGTIME_CHG); dp 2946 fs/xfs/xfs_inode.c error = xfs_dir_removename(tp, dp, name, ip->i_ino, resblks); dp 3159 fs/xfs/xfs_inode.c struct xfs_inode *dp, dp 3165 fs/xfs/xfs_inode.c error = xfs_create_tmpfile(dp, S_IFCHR | WHITEOUT_MODE, &tmpfile); dp 200 fs/xfs/xfs_inode.h xfs_get_initial_prid(struct xfs_inode *dp) dp 202 fs/xfs/xfs_inode.h if (dp->i_d.di_flags & XFS_DIFLAG_PROJINHERIT) dp 203 fs/xfs/xfs_inode.h return xfs_get_projid(dp); dp 415 fs/xfs/xfs_inode.h int xfs_lookup(struct xfs_inode *dp, struct xfs_name *name, dp 417 fs/xfs/xfs_inode.h int xfs_create(struct xfs_inode *dp, struct xfs_name *name, dp 419 fs/xfs/xfs_inode.h int xfs_create_tmpfile(struct xfs_inode *dp, umode_t mode, dp 421 fs/xfs/xfs_inode.h int xfs_remove(struct xfs_inode *dp, struct xfs_name *name, dp 1614 fs/xfs/xfs_log.c char *dp; dp 1618 fs/xfs/xfs_log.c dp = iclog->ic_datap; dp 1622 fs/xfs/xfs_log.c iclog->ic_header.h_cycle_data[i] = *(__be32 *)dp; dp 1623 fs/xfs/xfs_log.c *(__be32 *)dp = cycle_lsn; dp 1624 fs/xfs/xfs_log.c dp += BBSIZE; dp 1633 fs/xfs/xfs_log.c xhdr[j].hic_xheader.xh_cycle_data[k] = *(__be32 *)dp; dp 1634 fs/xfs/xfs_log.c *(__be32 *)dp = cycle_lsn; dp 1635 fs/xfs/xfs_log.c dp += 
BBSIZE; dp 1653 fs/xfs/xfs_log.c char *dp, dp 1680 fs/xfs/xfs_log.c crc = crc32c(crc, dp, size); dp 425 fs/xfs/xfs_log_priv.h char *dp, int size); dp 4175 fs/xfs/xfs_log_recover.c char *dp, dp 4196 fs/xfs/xfs_log_recover.c memcpy(ptr, dp, len); dp 4207 fs/xfs/xfs_log_recover.c memcpy(&ptr[old_len], dp, len); dp 4231 fs/xfs/xfs_log_recover.c char *dp, dp 4242 fs/xfs/xfs_log_recover.c if (*(uint *)dp != XFS_TRANS_HEADER_MAGIC) { dp 4262 fs/xfs/xfs_log_recover.c memcpy(&trans->r_theader, dp, len); dp 4267 fs/xfs/xfs_log_recover.c memcpy(ptr, dp, len); dp 4339 fs/xfs/xfs_log_recover.c char *dp, dp 4361 fs/xfs/xfs_log_recover.c error = xlog_recover_add_to_trans(log, trans, dp, len); dp 4364 fs/xfs/xfs_log_recover.c error = xlog_recover_add_to_cont_trans(log, trans, dp, len); dp 4448 fs/xfs/xfs_log_recover.c char *dp, dp 4470 fs/xfs/xfs_log_recover.c if (dp + len > end) { dp 4513 fs/xfs/xfs_log_recover.c return xlog_recovery_process_trans(log, trans, dp, len, dp 4531 fs/xfs/xfs_log_recover.c char *dp, dp 4540 fs/xfs/xfs_log_recover.c end = dp + be32_to_cpu(rhead->h_len); dp 4548 fs/xfs/xfs_log_recover.c while ((dp < end) && num_logops) { dp 4550 fs/xfs/xfs_log_recover.c ohead = (struct xlog_op_header *)dp; dp 4551 fs/xfs/xfs_log_recover.c dp += sizeof(*ohead); dp 4552 fs/xfs/xfs_log_recover.c ASSERT(dp <= end); dp 4556 fs/xfs/xfs_log_recover.c dp, end, pass, buffer_list); dp 4560 fs/xfs/xfs_log_recover.c dp += be32_to_cpu(ohead->oh_len); dp 5104 fs/xfs/xfs_log_recover.c char *dp, dp 5111 fs/xfs/xfs_log_recover.c *(__be32 *)dp = *(__be32 *)&rhead->h_cycle_data[i]; dp 5112 fs/xfs/xfs_log_recover.c dp += BBSIZE; dp 5120 fs/xfs/xfs_log_recover.c *(__be32 *)dp = xhdr[j].hic_xheader.xh_cycle_data[k]; dp 5121 fs/xfs/xfs_log_recover.c dp += BBSIZE; dp 5134 fs/xfs/xfs_log_recover.c char *dp, dp 5141 fs/xfs/xfs_log_recover.c crc = xlog_cksum(log, rhead, dp, be32_to_cpu(rhead->h_len)); dp 5168 fs/xfs/xfs_log_recover.c xfs_hex_dump(dp, 32); dp 5179 fs/xfs/xfs_log_recover.c xlog_unpack_data(rhead, dp, log); dp 5181 fs/xfs/xfs_log_recover.c return xlog_recover_process_data(log, rhash, rhead, dp, pass, dp 146 fs/xfs/xfs_symlink.c struct xfs_inode *dp, dp 152 fs/xfs/xfs_symlink.c struct xfs_mount *mp = dp->i_mount; dp 175 fs/xfs/xfs_symlink.c trace_xfs_symlink(dp, link_name); dp 189 fs/xfs/xfs_symlink.c prid = xfs_get_initial_prid(dp); dp 194 fs/xfs/xfs_symlink.c error = xfs_qm_vop_dqalloc(dp, dp 206 fs/xfs/xfs_symlink.c if (pathlen <= XFS_LITINO(mp, dp->i_d.di_version)) dp 216 fs/xfs/xfs_symlink.c xfs_ilock(dp, XFS_ILOCK_EXCL | XFS_ILOCK_PARENT); dp 222 fs/xfs/xfs_symlink.c if (dp->i_d.di_flags & XFS_DIFLAG_NOSYMLINKS) { dp 238 fs/xfs/xfs_symlink.c error = xfs_dir_ialloc(&tp, dp, S_IFLNK | (mode & ~S_IFMT), 1, 0, dp 250 fs/xfs/xfs_symlink.c xfs_trans_ijoin(tp, dp, XFS_ILOCK_EXCL); dp 323 fs/xfs/xfs_symlink.c error = xfs_dir_createname(tp, dp, link_name, ip->i_ino, resblks); dp 326 fs/xfs/xfs_symlink.c xfs_trans_ichgtime(tp, dp, XFS_ICHGTIME_MOD | XFS_ICHGTIME_CHG); dp 327 fs/xfs/xfs_symlink.c xfs_trans_log_inode(tp, dp, XFS_ILOG_CORE); dp 367 fs/xfs/xfs_symlink.c xfs_iunlock(dp, XFS_ILOCK_EXCL); dp 10 fs/xfs/xfs_symlink.h int xfs_symlink(struct xfs_inode *dp, struct xfs_name *link_name, dp 56 fs/xfs/xfs_trace.h __entry->dev = VFS_I(ctx->dp)->i_sb->s_dev; dp 57 fs/xfs/xfs_trace.h __entry->ino = ctx->dp->i_ino; dp 182 fs/xfs/xfs_trace.h __entry->dev = VFS_I(ctx->dp)->i_sb->s_dev; dp 183 fs/xfs/xfs_trace.h __entry->ino = ctx->dp->i_ino; dp 790 fs/xfs/xfs_trace.h TP_PROTO(struct xfs_inode *dp, struct xfs_name 
*name), dp 791 fs/xfs/xfs_trace.h TP_ARGS(dp, name), dp 799 fs/xfs/xfs_trace.h __entry->dev = VFS_I(dp)->i_sb->s_dev; dp 800 fs/xfs/xfs_trace.h __entry->dp_ino = dp->i_ino; dp 813 fs/xfs/xfs_trace.h TP_PROTO(struct xfs_inode *dp, struct xfs_name *name), \ dp 814 fs/xfs/xfs_trace.h TP_ARGS(dp, name)) dp 1679 fs/xfs/xfs_trace.h __entry->dev = VFS_I(args->dp)->i_sb->s_dev; dp 1680 fs/xfs/xfs_trace.h __entry->ino = args->dp->i_ino; dp 1744 fs/xfs/xfs_trace.h __entry->dev = VFS_I(args->dp)->i_sb->s_dev; dp 1745 fs/xfs/xfs_trace.h __entry->ino = args->dp->i_ino; dp 1849 fs/xfs/xfs_trace.h __entry->dev = VFS_I(args->dp)->i_sb->s_dev; dp 1850 fs/xfs/xfs_trace.h __entry->ino = args->dp->i_ino; dp 1882 fs/xfs/xfs_trace.h __entry->dev = VFS_I(args->dp)->i_sb->s_dev; dp 1883 fs/xfs/xfs_trace.h __entry->ino = args->dp->i_ino; dp 224 fs/xfs/xfs_xattr.c context.dp = XFS_I(inode); dp 37 include/crypto/internal/rsa.h const u8 *dp; dp 41 include/drm/bridge/analogix_dp.h int analogix_dp_resume(struct analogix_dp_device *dp); dp 42 include/drm/bridge/analogix_dp.h int analogix_dp_suspend(struct analogix_dp_device *dp); dp 46 include/drm/bridge/analogix_dp.h int analogix_dp_bind(struct analogix_dp_device *dp, struct drm_device *drm_dev); dp 47 include/drm/bridge/analogix_dp.h void analogix_dp_unbind(struct analogix_dp_device *dp); dp 48 include/drm/bridge/analogix_dp.h void analogix_dp_remove(struct analogix_dp_device *dp); dp 59 include/linux/dsa/sja1105.h struct dsa_port *dp; dp 549 include/linux/mISDNif.h _alloc_mISDN_skb(u_int prim, u_int id, u_int len, void *dp, gfp_t gfp_mask) dp 557 include/linux/mISDNif.h skb_put_data(skb, dp, len); dp 566 include/linux/mISDNif.h u_int id, u_int len, void *dp, gfp_t gfp_mask) dp 572 include/linux/mISDNif.h skb = _alloc_mISDN_skb(prim, id, len, dp, gfp_mask); dp 333 include/net/dsa.h const struct dsa_port *dp = dsa_to_port(ds, port); dp 334 include/net/dsa.h const struct dsa_port *cpu_dp = dp->cpu_dp; dp 342 include/net/dsa.h static inline bool dsa_port_is_vlan_filtering(const struct dsa_port *dp) dp 344 include/net/dsa.h const struct dsa_switch *ds = dp->ds; dp 349 include/net/dsa.h return dp->vlan_filtering; dp 642 include/net/dsa.h int dsa_port_get_phy_strings(struct dsa_port *dp, uint8_t *data); dp 643 include/net/dsa.h int dsa_port_get_ethtool_phy_stats(struct dsa_port *dp, uint64_t *data); dp 644 include/net/dsa.h int dsa_port_get_phy_sset_count(struct dsa_port *dp); dp 159 include/uapi/linux/coda.h #define DIRSIZ(dp) ((sizeof (struct venus_dirent) - (CODA_MAXNAMLEN+1)) + \ dp 160 include/uapi/linux/coda.h (((dp)->d_namlen+1 + 3) &~ 3)) dp 36 include/uapi/linux/ppp-comp.h #define CCP_CODE(dp) ((dp)[0]) dp 37 include/uapi/linux/ppp-comp.h #define CCP_ID(dp) ((dp)[1]) dp 38 include/uapi/linux/ppp-comp.h #define CCP_LENGTH(dp) (((dp)[2] << 8) + (dp)[3]) dp 41 include/uapi/linux/ppp-comp.h #define CCP_OPT_CODE(dp) ((dp)[0]) dp 42 include/uapi/linux/ppp-comp.h #define CCP_OPT_LENGTH(dp) ((dp)[1]) dp 329 include/video/imx-ipu-v3.h int ipu_dp_enable_channel(struct ipu_dp *dp); dp 330 include/video/imx-ipu-v3.h void ipu_dp_disable_channel(struct ipu_dp *dp, bool sync); dp 332 include/video/imx-ipu-v3.h int ipu_dp_setup_channel(struct ipu_dp *dp, dp 335 include/video/imx-ipu-v3.h int ipu_dp_set_global_alpha(struct ipu_dp *dp, bool enable, u8 alpha, dp 499 include/video/imx-ipu-v3.h int dp; dp 366 include/xen/interface/platform.h GUEST_HANDLE(xen_processor_csd) dp; /* NULL if no dependency */ dp 855 kernel/cgroup/cpuset.c struct cpumask *dp; dp 863 kernel/cgroup/cpuset.c dp 
= doms[nslot]; dp 875 kernel/cgroup/cpuset.c cpumask_clear(dp); dp 882 kernel/cgroup/cpuset.c cpumask_or(dp, dp, b->effective_cpus); dp 883 kernel/cgroup/cpuset.c cpumask_and(dp, dp, housekeeping_cpumask(HK_FLAG_DOMAIN)); dp 62 lib/asn1_decoder.c size_t dp = *_dp, len, n; dp 66 lib/asn1_decoder.c if (unlikely(datalen - dp < 2)) { dp 67 lib/asn1_decoder.c if (datalen == dp) dp 73 lib/asn1_decoder.c tag = data[dp++]; dp 76 lib/asn1_decoder.c if (data[dp++] != 0) dp 79 lib/asn1_decoder.c *_len = dp - *_dp; dp 80 lib/asn1_decoder.c *_dp = dp; dp 88 lib/asn1_decoder.c if (unlikely(datalen - dp < 2)) dp 90 lib/asn1_decoder.c tmp = data[dp++]; dp 95 lib/asn1_decoder.c len = data[dp++]; dp 110 lib/asn1_decoder.c if (unlikely(n > datalen - dp)) dp 115 lib/asn1_decoder.c len |= data[dp++]; dp 118 lib/asn1_decoder.c if (len > datalen - dp) dp 120 lib/asn1_decoder.c dp += len; dp 138 lib/asn1_decoder.c *_dp = dp; dp 178 lib/asn1_decoder.c size_t pc = 0, dp = 0, tdp = 0, len = 0; dp 202 lib/asn1_decoder.c pc, machlen, dp, datalen, csp, jsp); dp 217 lib/asn1_decoder.c (op & ASN1_OP_MATCH__SKIP && dp == datalen)) { dp 227 lib/asn1_decoder.c if (unlikely(datalen - dp < 2)) dp 229 lib/asn1_decoder.c tag = data[dp++]; dp 252 lib/asn1_decoder.c dp--; dp 260 lib/asn1_decoder.c len = data[dp++]; dp 267 lib/asn1_decoder.c if (unlikely(2 > datalen - dp)) dp 273 lib/asn1_decoder.c if (unlikely(n > datalen - dp)) dp 278 lib/asn1_decoder.c len |= data[dp++]; dp 280 lib/asn1_decoder.c if (unlikely(len > datalen - dp)) dp 284 lib/asn1_decoder.c if (unlikely(len > datalen - dp)) dp 294 lib/asn1_decoder.c cons_dp_stack[csp] = dp; dp 298 lib/asn1_decoder.c datalen = dp + len; dp 307 lib/asn1_decoder.c tdp = dp; dp 329 lib/asn1_decoder.c size_t tmp = dp; dp 346 lib/asn1_decoder.c ret = actions[act](context, hdr, tag, data + dp, len); dp 352 lib/asn1_decoder.c dp += len; dp 400 lib/asn1_decoder.c tdp, dp, len, datalen); dp 404 lib/asn1_decoder.c if (unlikely(datalen - dp < 2)) dp 406 lib/asn1_decoder.c if (data[dp++] != 0) { dp 408 lib/asn1_decoder.c dp--; dp 416 lib/asn1_decoder.c if (data[dp++] != 0) dp 418 lib/asn1_decoder.c len = dp - tdp - 2; dp 420 lib/asn1_decoder.c if (dp < len && (op & ASN1_OP_END__OF)) { dp 427 lib/asn1_decoder.c if (dp != len) dp 430 lib/asn1_decoder.c pr_debug("- cons len l=%zu d=%zu\n", len, dp - tdp); dp 516 lib/asn1_decoder.c errmsg, pc, dp, optag, tag, len); dp 91 lib/dynamic_debug.c static char *ddebug_describe_flags(struct _ddebug *dp, char *buf, dp 99 lib/dynamic_debug.c if (dp->flags & opt_array[i].flag) dp 159 lib/dynamic_debug.c struct _ddebug *dp = &dt->ddebugs[i]; dp 163 lib/dynamic_debug.c !match_wildcard(query->filename, dp->filename) && dp 165 lib/dynamic_debug.c kbasename(dp->filename)) && dp 167 lib/dynamic_debug.c trim_prefix(dp->filename))) dp 172 lib/dynamic_debug.c !match_wildcard(query->function, dp->function)) dp 177 lib/dynamic_debug.c !strstr(dp->format, query->format)) dp 182 lib/dynamic_debug.c dp->lineno < query->first_lineno) dp 185 lib/dynamic_debug.c dp->lineno > query->last_lineno) dp 190 lib/dynamic_debug.c newflags = (dp->flags & mask) | flags; dp 191 lib/dynamic_debug.c if (newflags == dp->flags) dp 194 lib/dynamic_debug.c if (dp->flags & _DPRINTK_FLAGS_PRINT) { dp 196 lib/dynamic_debug.c static_branch_disable(&dp->key.dd_key_true); dp 198 lib/dynamic_debug.c static_branch_enable(&dp->key.dd_key_true); dp 200 lib/dynamic_debug.c dp->flags = newflags; dp 202 lib/dynamic_debug.c trim_prefix(dp->filename), dp->lineno, dp 203 lib/dynamic_debug.c dt->mod_name, 
dp->function, dp 204 lib/dynamic_debug.c ddebug_describe_flags(dp, flagbuf, dp 771 lib/dynamic_debug.c struct _ddebug *dp; dp 782 lib/dynamic_debug.c dp = ddebug_iter_first(iter); dp 783 lib/dynamic_debug.c while (dp != NULL && --n > 0) dp 784 lib/dynamic_debug.c dp = ddebug_iter_next(iter); dp 785 lib/dynamic_debug.c return dp; dp 796 lib/dynamic_debug.c struct _ddebug *dp; dp 802 lib/dynamic_debug.c dp = ddebug_iter_first(iter); dp 804 lib/dynamic_debug.c dp = ddebug_iter_next(iter); dp 806 lib/dynamic_debug.c return dp; dp 818 lib/dynamic_debug.c struct _ddebug *dp = p; dp 830 lib/dynamic_debug.c trim_prefix(dp->filename), dp->lineno, dp 831 lib/dynamic_debug.c iter->table->mod_name, dp->function, dp 832 lib/dynamic_debug.c ddebug_describe_flags(dp, flagsbuf, sizeof(flagsbuf))); dp 833 lib/dynamic_debug.c seq_escape(m, dp->format, "\t\r\n\""); dp 151 lib/mpi/mpi-internal.h mpi_ptr_t dp, mpi_size_t dsize); dp 46 lib/mpi/mpih-div.c mpi_ptr_t np, mpi_size_t nsize, mpi_ptr_t dp, mpi_size_t dsize) dp 66 lib/mpi/mpih-div.c d = dp[0]; dp 93 lib/mpi/mpih-div.c d1 = dp[1]; dp 94 lib/mpi/mpih-div.c d0 = dp[0]; dp 157 lib/mpi/mpih-div.c dX = dp[dsize - 1]; dp 158 lib/mpi/mpih-div.c d1 = dp[dsize - 2]; dp 163 lib/mpi/mpih-div.c || mpihelp_cmp(np, dp, dsize - 1) >= 0) { dp 164 lib/mpi/mpih-div.c mpihelp_sub_n(np, np, dp, dsize); dp 209 lib/mpi/mpih-div.c cy_limb = mpihelp_submul_1(np, dp, dsize, q); dp 212 lib/mpi/mpih-div.c mpihelp_add_n(np, np, dp, dsize); dp 23 lib/raid6/recov.c u8 *p, *q, *dp, *dq; dp 34 lib/raid6/recov.c dp = (u8 *)ptrs[faila]; dp 36 lib/raid6/recov.c ptrs[disks-2] = dp; dp 44 lib/raid6/recov.c ptrs[faila] = dp; dp 55 lib/raid6/recov.c px = *p ^ *dp; dp 58 lib/raid6/recov.c *dp++ = db ^ px; /* Reconstructed A */ dp 21 lib/raid6/recov_avx2.c u8 *p, *q, *dp, *dq; dp 32 lib/raid6/recov_avx2.c dp = (u8 *)ptrs[faila]; dp 34 lib/raid6/recov_avx2.c ptrs[disks-2] = dp; dp 42 lib/raid6/recov_avx2.c ptrs[faila] = dp; dp 65 lib/raid6/recov_avx2.c asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0])); dp 66 lib/raid6/recov_avx2.c asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32])); dp 127 lib/raid6/recov_avx2.c asm volatile("vmovdqa %%ymm0, %0" : "=m" (dp[0])); dp 128 lib/raid6/recov_avx2.c asm volatile("vmovdqa %%ymm8, %0" : "=m" (dp[32])); dp 133 lib/raid6/recov_avx2.c dp += 64; dp 139 lib/raid6/recov_avx2.c asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (*dp)); dp 175 lib/raid6/recov_avx2.c asm volatile("vmovdqa %%ymm0, %0" : "=m" (dp[0])); dp 180 lib/raid6/recov_avx2.c dp += 32; dp 27 lib/raid6/recov_avx512.c u8 *p, *q, *dp, *dq; dp 41 lib/raid6/recov_avx512.c dp = (u8 *)ptrs[faila]; dp 43 lib/raid6/recov_avx512.c ptrs[disks-2] = dp; dp 51 lib/raid6/recov_avx512.c ptrs[faila] = dp; dp 79 lib/raid6/recov_avx512.c "m" (dp[0]), "m" (dp[64])); dp 153 lib/raid6/recov_avx512.c : "m" (dp[0]), "m" (dp[64])); dp 158 lib/raid6/recov_avx512.c dp += 128; dp 166 lib/raid6/recov_avx512.c : "m" (*q), "m" (*p), "m"(*dq), "m" (*dp)); dp 214 lib/raid6/recov_avx512.c : "m" (dp[0])); dp 219 lib/raid6/recov_avx512.c dp += 64; dp 22 lib/raid6/recov_neon.c void __raid6_2data_recov_neon(int bytes, uint8_t *p, uint8_t *q, uint8_t *dp, dp 32 lib/raid6/recov_neon.c u8 *p, *q, *dp, *dq; dp 44 lib/raid6/recov_neon.c dp = (u8 *)ptrs[faila]; dp 46 lib/raid6/recov_neon.c ptrs[disks - 2] = dp; dp 54 lib/raid6/recov_neon.c ptrs[faila] = dp; dp 65 lib/raid6/recov_neon.c __raid6_2data_recov_neon(bytes, p, q, dp, dq, pbmul, qmul); dp 27 lib/raid6/recov_neon_inner.c void __raid6_2data_recov_neon(int bytes, 
uint8_t *p, uint8_t *q, uint8_t *dp, dp 52 lib/raid6/recov_neon_inner.c px = veorq_u8(vld1q_u8(p), vld1q_u8(dp)); dp 67 lib/raid6/recov_neon_inner.c vst1q_u8(dp, veorq_u8(db, px)); dp 72 lib/raid6/recov_neon_inner.c dp += 16; dp 26 lib/raid6/recov_s390xc.c u8 *p, *q, *dp, *dq; dp 37 lib/raid6/recov_s390xc.c dp = (u8 *)ptrs[faila]; dp 39 lib/raid6/recov_s390xc.c ptrs[disks-2] = dp; dp 47 lib/raid6/recov_s390xc.c ptrs[faila] = dp; dp 58 lib/raid6/recov_s390xc.c xor_block(dp, p); dp 61 lib/raid6/recov_s390xc.c dq[i] = pbmul[dp[i]] ^ qmul[dq[i]]; dp 62 lib/raid6/recov_s390xc.c xor_block(dp, dq); dp 65 lib/raid6/recov_s390xc.c dp += 256; dp 21 lib/raid6/recov_ssse3.c u8 *p, *q, *dp, *dq; dp 34 lib/raid6/recov_ssse3.c dp = (u8 *)ptrs[faila]; dp 36 lib/raid6/recov_ssse3.c ptrs[disks-2] = dp; dp 44 lib/raid6/recov_ssse3.c ptrs[faila] = dp; dp 75 lib/raid6/recov_ssse3.c asm volatile("pxor %0,%%xmm0" : : "m" (dp[0])); dp 76 lib/raid6/recov_ssse3.c asm volatile("pxor %0,%%xmm8" : : "m" (dp[16])); dp 131 lib/raid6/recov_ssse3.c asm volatile("movdqa %%xmm0,%0" : "=m" (dp[0])); dp 132 lib/raid6/recov_ssse3.c asm volatile("movdqa %%xmm8,%0" : "=m" (dp[16])); dp 137 lib/raid6/recov_ssse3.c dp += 32; dp 143 lib/raid6/recov_ssse3.c asm volatile("pxor %0,%%xmm0" : : "m" (*dp)); dp 179 lib/raid6/recov_ssse3.c asm volatile("movdqa %%xmm0,%0" : "=m" (*dp)); dp 184 lib/raid6/recov_ssse3.c dp += 16; dp 1452 net/ax25/af_ax25.c ax25_digi dtmp, *dp; dp 1533 net/ax25/af_ax25.c dp = NULL; dp 1535 net/ax25/af_ax25.c dp = &dtmp; dp 1548 net/ax25/af_ax25.c dp = ax25->digipeat; dp 1589 net/ax25/af_ax25.c skb_push(skb, 1 + ax25_addr_size(dp)); dp 1595 net/ax25/af_ax25.c dp, AX25_COMMAND, AX25_MODULUS); dp 273 net/ax25/ax25_addr.c int ax25_addr_size(const ax25_digi *dp) dp 275 net/ax25/ax25_addr.c if (dp == NULL) dp 278 net/ax25/ax25_addr.c return AX25_ADDR_LEN * (2 + dp->ndigi); dp 189 net/ax25/ax25_in.c ax25_digi dp, reverse_dp; dp 206 net/ax25/ax25_in.c if (ax25_addr_parse(skb->data, skb->len, &src, &dest, &dp, &type, &dama) == NULL) dp 212 net/ax25/ax25_in.c if (dp.lastrepeat + 1 < dp.ndigi) /* Not yet digipeated completely */ dp 213 net/ax25/ax25_in.c next_digi = &dp.calls[dp.lastrepeat + 1]; dp 218 net/ax25/ax25_in.c skb_pull(skb, ax25_addr_size(&dp)); dp 221 net/ax25/ax25_in.c if (ax25cmp(&dest, dev_addr) == 0 && dp.lastrepeat + 1 == dp.ndigi) dp 225 net/ax25/ax25_in.c if (!mine && ax25_listen_mine(&dest, dev) && dp.lastrepeat + 1 == dp.ndigi) dp 229 net/ax25/ax25_in.c if ((*skb->data & ~0x10) == AX25_UI && dp.lastrepeat + 1 == dp.ndigi) { dp 301 net/ax25/ax25_in.c ax25_digi_invert(&dp, &reverse_dp); dp 328 net/ax25/ax25_in.c ax25_return_dm(dev, &src, &dest, &dp); dp 335 net/ax25/ax25_in.c if (dp.lastrepeat + 1 == dp.ndigi) dp 345 net/ax25/ax25_in.c ax25_return_dm(dev, &src, &dest, &dp); dp 366 net/ax25/ax25_in.c ax25_return_dm(dev, &src, &dest, &dp); dp 379 net/ax25/ax25_in.c if (dp.ndigi && !ax25->digipeat && dp 388 net/ax25/ax25_in.c if (dp.ndigi == 0) { dp 165 net/ceph/auth_x.c void *dp, *dend; dp 193 net/ceph/auth_x.c dp = *p + ceph_x_encrypt_offset(); dp 198 net/ceph/auth_x.c dend = dp + ret; dp 200 net/ceph/auth_x.c tkt_struct_v = ceph_decode_8(&dp); dp 204 net/ceph/auth_x.c ret = ceph_crypto_key_decode(&new_session_key, &dp, dend); dp 208 net/ceph/auth_x.c ceph_decode_timespec64(&validity, dp); dp 209 net/ceph/auth_x.c dp += sizeof(struct ceph_timespec); dp 113 net/dccp/ccid.h static inline int ccid_get_current_rx_ccid(struct dccp_sock *dp) dp 115 net/dccp/ccid.h struct ccid *ccid = dp->dccps_hc_rx_ccid; dp 
122 net/dccp/ccid.h static inline int ccid_get_current_tx_ccid(struct dccp_sock *dp) dp 124 net/dccp/ccid.h struct ccid *ccid = dp->dccps_hc_tx_ccid; dp 237 net/dccp/ccids/ccid2.c struct dccp_sock *dp = dccp_sk(sk); dp 265 net/dccp/ccids/ccid2.c hc->tx_seqh->ccid2s_seq = dp->dccps_gss; dp 315 net/dccp/ccids/ccid2.c if (dp->dccps_l_ack_ratio > 1) { dp 317 net/dccp/ccids/ccid2.c int denom = dp->dccps_l_ack_ratio * dp->dccps_l_ack_ratio - dp 318 net/dccp/ccids/ccid2.c dp->dccps_l_ack_ratio; dp 323 net/dccp/ccids/ccid2.c ccid2_change_l_ack_ratio(sk, dp->dccps_l_ack_ratio - 1); dp 433 net/dccp/ccids/ccid2.c struct dccp_sock *dp = dccp_sk(sk); dp 434 net/dccp/ccids/ccid2.c int r_seq_used = hc->tx_cwnd / dp->dccps_l_ack_ratio; dp 436 net/dccp/ccids/ccid2.c if (hc->tx_cwnd < dp->dccps_l_seq_win && dp 437 net/dccp/ccids/ccid2.c r_seq_used < dp->dccps_r_seq_win) { dp 454 net/dccp/ccids/ccid2.c if (r_seq_used * CCID2_WIN_CHANGE_FACTOR >= dp->dccps_r_seq_win) dp 455 net/dccp/ccids/ccid2.c ccid2_change_l_ack_ratio(sk, dp->dccps_l_ack_ratio * 2); dp 456 net/dccp/ccids/ccid2.c else if (r_seq_used * CCID2_WIN_CHANGE_FACTOR < dp->dccps_r_seq_win/2) dp 457 net/dccp/ccids/ccid2.c ccid2_change_l_ack_ratio(sk, dp->dccps_l_ack_ratio / 2 ? : 1U); dp 459 net/dccp/ccids/ccid2.c if (hc->tx_cwnd * CCID2_WIN_CHANGE_FACTOR >= dp->dccps_l_seq_win) dp 460 net/dccp/ccids/ccid2.c ccid2_change_l_seq_window(sk, dp->dccps_l_seq_win * 2); dp 461 net/dccp/ccids/ccid2.c else if (hc->tx_cwnd * CCID2_WIN_CHANGE_FACTOR < dp->dccps_l_seq_win/2) dp 462 net/dccp/ccids/ccid2.c ccid2_change_l_seq_window(sk, dp->dccps_l_seq_win / 2); dp 508 net/dccp/ccids/ccid2.c struct dccp_sock *dp = dccp_sk(sk); dp 548 net/dccp/ccids/ccid2.c ccid2_change_l_ack_ratio(sk, 2 * dp->dccps_l_ack_ratio); dp 581 net/dccp/ccids/ccid2.c maxincr = DIV_ROUND_UP(dp->dccps_l_ack_ratio, 2); dp 716 net/dccp/ccids/ccid2.c struct dccp_sock *dp = dccp_sk(sk); dp 723 net/dccp/ccids/ccid2.c hc->tx_cwnd = rfc3390_bytes_to_packets(dp->dccps_mss_cache); dp 728 net/dccp/ccids/ccid2.c if (dp->dccps_l_ack_ratio == 0 || dp->dccps_l_ack_ratio > max_ratio) dp 729 net/dccp/ccids/ccid2.c dp->dccps_l_ack_ratio = max_ratio; dp 269 net/dccp/ccids/ccid3.c struct dccp_sock *dp = dccp_sk(sk); dp 298 net/dccp/ccids/ccid3.c if (dp->dccps_syn_rtt) { dp 299 net/dccp/ccids/ccid3.c ccid3_pr_debug("SYN RTT = %uus\n", dp->dccps_syn_rtt); dp 300 net/dccp/ccids/ccid3.c hc->tx_rtt = dp->dccps_syn_rtt; dp 336 net/dccp/ccids/ccid3.c dp->dccps_hc_tx_insert_options = 1; dp 589 net/dccp/ccids/ccid3.c struct dccp_sock *dp = dccp_sk(sk); dp 629 net/dccp/ccids/ccid3.c dp->dccps_hc_rx_insert_options = 1; dp 265 net/dccp/dccp.h int dccp_reqsk_init(struct request_sock *rq, struct dccp_sock const *dp, dp 330 net/dccp/dccp.h const struct dccp_sock *dp = dccp_sk(sk); dp 332 net/dccp/dccp.h if (dp->dccps_service == service) dp 334 net/dccp/dccp.h return !dccp_list_has_service(dp->dccps_service_list, service); dp 419 net/dccp/dccp.h struct dccp_sock *dp = dccp_sk(sk); dp 421 net/dccp/dccp.h if (after48(seq, dp->dccps_gsr)) dp 422 net/dccp/dccp.h dp->dccps_gsr = seq; dp 424 net/dccp/dccp.h dp->dccps_swl = SUB48(ADD48(dp->dccps_gsr, 1), dp->dccps_r_seq_win / 4); dp 440 net/dccp/dccp.h if (before48(dp->dccps_swl, dp->dccps_isr)) dp 441 net/dccp/dccp.h dp->dccps_swl = dp->dccps_isr; dp 442 net/dccp/dccp.h dp->dccps_swh = ADD48(dp->dccps_gsr, (3 * dp->dccps_r_seq_win) / 4); dp 447 net/dccp/dccp.h struct dccp_sock *dp = dccp_sk(sk); dp 449 net/dccp/dccp.h dp->dccps_gss = seq; dp 451 net/dccp/dccp.h dp->dccps_awl = 
SUB48(ADD48(dp->dccps_gss, 1), dp->dccps_l_seq_win); dp 453 net/dccp/dccp.h if (before48(dp->dccps_awl, dp->dccps_iss)) dp 454 net/dccp/dccp.h dp->dccps_awl = dp->dccps_iss; dp 455 net/dccp/dccp.h dp->dccps_awh = dp->dccps_gss; dp 470 net/dccp/dccp.h int dccp_feat_finalise_settings(struct dccp_sock *dp); dp 18 net/dccp/diag.c struct dccp_sock *dp = dccp_sk(sk); dp 29 net/dccp/diag.c if (dp->dccps_hc_rx_ackvec != NULL) dp 32 net/dccp/diag.c if (dp->dccps_hc_rx_ccid != NULL) dp 33 net/dccp/diag.c ccid_hc_rx_get_info(dp->dccps_hc_rx_ccid, sk, info); dp 35 net/dccp/diag.c if (dp->dccps_hc_tx_ccid != NULL) dp 36 net/dccp/diag.c ccid_hc_tx_get_info(dp->dccps_hc_tx_ccid, sk, info); dp 38 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 45 net/dccp/feat.c ccid_hc_rx_delete(dp->dccps_hc_rx_ccid, sk); dp 46 net/dccp/feat.c dp->dccps_hc_rx_ccid = new_ccid; dp 48 net/dccp/feat.c ccid_hc_tx_delete(dp->dccps_hc_tx_ccid, sk); dp 49 net/dccp/feat.c dp->dccps_hc_tx_ccid = new_ccid; dp 56 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 59 net/dccp/feat.c dp->dccps_r_seq_win = seq_win; dp 61 net/dccp/feat.c dccp_update_gsr(sk, dp->dccps_gsr); dp 63 net/dccp/feat.c dp->dccps_l_seq_win = seq_win; dp 65 net/dccp/feat.c dccp_update_gss(sk, dp->dccps_gss); dp 81 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 84 net/dccp/feat.c if (enable && dp->dccps_hc_rx_ackvec == NULL) { dp 85 net/dccp/feat.c dp->dccps_hc_rx_ackvec = dccp_ackvec_alloc(gfp_any()); dp 86 net/dccp/feat.c if (dp->dccps_hc_rx_ackvec == NULL) dp 89 net/dccp/feat.c dccp_ackvec_free(dp->dccps_hc_rx_ackvec); dp 90 net/dccp/feat.c dp->dccps_hc_rx_ackvec = NULL; dp 114 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 117 net/dccp/feat.c dp->dccps_pcrlen = cscov; dp 119 net/dccp/feat.c if (dp->dccps_pcslen == 0) dp 120 net/dccp/feat.c dp->dccps_pcslen = cscov; dp 121 net/dccp/feat.c else if (cscov > dp->dccps_pcslen) dp 123 net/dccp/feat.c dp->dccps_pcslen, (u8)cscov); dp 629 net/dccp/feat.c int dccp_feat_insert_opts(struct dccp_sock *dp, struct dccp_request_sock *dreq, dp 632 net/dccp/feat.c struct list_head *fn = dreq ? &dreq->dreq_featneg : &dp->dccps_featneg; dp 774 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 777 net/dccp/feat.c entry = dccp_feat_list_lookup(&dp->dccps_featneg, feat, 1); dp 783 net/dccp/feat.c return dp->dccps_l_ack_ratio; dp 785 net/dccp/feat.c return dp->dccps_l_seq_win; dp 968 net/dccp/feat.c int dccp_feat_finalise_settings(struct dccp_sock *dp) dp 970 net/dccp/feat.c struct list_head *fn = &dp->dccps_featneg; dp 1397 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 1398 net/dccp/feat.c struct list_head *fn = dreq ? 
&dreq->dreq_featneg : &dp->dccps_featneg; dp 1499 net/dccp/feat.c struct dccp_sock *dp = dccp_sk(sk); dp 1558 net/dccp/feat.c ccid_hc_rx_delete(dp->dccps_hc_rx_ccid, sk); dp 1559 net/dccp/feat.c ccid_hc_tx_delete(dp->dccps_hc_tx_ccid, sk); dp 1560 net/dccp/feat.c dp->dccps_hc_rx_ccid = dp->dccps_hc_tx_ccid = NULL; dp 1561 net/dccp/feat.c dccp_ackvec_free(dp->dccps_hc_rx_ackvec); dp 1562 net/dccp/feat.c dp->dccps_hc_rx_ackvec = NULL; dp 172 net/dccp/input.c const struct dccp_sock *dp = dccp_sk(sk); dp 176 net/dccp/input.c ccid_hc_rx_packet_recv(dp->dccps_hc_rx_ccid, sk, skb); dp 182 net/dccp/input.c ccid_hc_tx_packet_recv(dp->dccps_hc_tx_ccid, sk, skb); dp 188 net/dccp/input.c struct dccp_sock *dp = dccp_sk(sk); dp 206 net/dccp/input.c if (between48(ackno, dp->dccps_awl, dp->dccps_awh) && dp 207 net/dccp/input.c dccp_delta_seqno(dp->dccps_swl, seqno) >= 0) dp 224 net/dccp/input.c lswl = dp->dccps_swl; dp 225 net/dccp/input.c lawl = dp->dccps_awl; dp 230 net/dccp/input.c lswl = ADD48(dp->dccps_gsr, 1); dp 231 net/dccp/input.c lawl = dp->dccps_gar; dp 234 net/dccp/input.c if (between48(seqno, lswl, dp->dccps_swh) && dp 236 net/dccp/input.c between48(ackno, lawl, dp->dccps_awh))) { dp 241 net/dccp/input.c after48(ackno, dp->dccps_gar)) dp 242 net/dccp/input.c dp->dccps_gar = ackno; dp 257 net/dccp/input.c if (time_before(now, (dp->dccps_rate_last + dp 266 net/dccp/input.c (unsigned long long) dp->dccps_swh, dp 270 net/dccp/input.c (unsigned long long) dp->dccps_awh); dp 272 net/dccp/input.c dp->dccps_rate_last = now; dp 275 net/dccp/input.c seqno = dp->dccps_gsr; dp 286 net/dccp/input.c struct dccp_sock *dp = dccp_sk(sk); dp 331 net/dccp/input.c if (dp->dccps_role != DCCP_ROLE_LISTEN) dp 335 net/dccp/input.c if (dp->dccps_role != DCCP_ROLE_CLIENT) dp 338 net/dccp/input.c if (dccp_delta_seqno(dp->dccps_osr, dp 402 net/dccp/input.c struct dccp_sock *dp = dccp_sk(sk); dp 406 net/dccp/input.c dp->dccps_awl, dp->dccps_awh)) { dp 409 net/dccp/input.c (unsigned long long)dp->dccps_awl, dp 411 net/dccp/input.c (unsigned long long)dp->dccps_awh); dp 424 net/dccp/input.c if (likely(dp->dccps_options_received.dccpor_timestamp_echo)) dp 425 net/dccp/input.c dp->dccps_syn_rtt = dccp_sample_rtt(sk, 10 * (tstamp - dp 426 net/dccp/input.c dp->dccps_options_received.dccpor_timestamp_echo)); dp 441 net/dccp/input.c dp->dccps_gsr = dp->dccps_isr = DCCP_SKB_CB(skb)->dccpd_seq; dp 468 net/dccp/input.c if (dccp_feat_activate_values(sk, &dp->dccps_featneg)) dp 522 net/dccp/input.c struct dccp_sock *dp = dccp_sk(sk); dp 523 net/dccp/input.c u32 sample = dp->dccps_options_received.dccpor_timestamp_echo; dp 553 net/dccp/input.c dp->dccps_syn_rtt = dccp_sample_rtt(sk, 10 * delta); dp 556 net/dccp/input.c dp->dccps_osr = DCCP_SKB_CB(skb)->dccpd_seq; dp 574 net/dccp/input.c struct dccp_sock *dp = dccp_sk(sk); dp 640 net/dccp/input.c if ((dp->dccps_role != DCCP_ROLE_CLIENT && dp 642 net/dccp/input.c (dp->dccps_role == DCCP_ROLE_CLIENT && dp 42 net/dccp/ipv4.c struct dccp_sock *dp = dccp_sk(sk); dp 50 net/dccp/ipv4.c dp->dccps_role = DCCP_ROLE_CLIENT; dp 116 net/dccp/ipv4.c dp->dccps_iss = secure_dccp_sequence_number(inet->inet_saddr, dp 149 net/dccp/ipv4.c const struct dccp_sock *dp = dccp_sk(sk); dp 182 net/dccp/ipv4.c dccp_send_sync(sk, dp->dccps_gsr, DCCP_PKT_SYNC); dp 235 net/dccp/ipv4.c struct dccp_sock *dp; dp 281 net/dccp/ipv4.c dp = dccp_sk(sk); dp 283 net/dccp/ipv4.c !between48(seq, dp->dccps_awl, dp->dccps_awh)) { dp 72 net/dccp/ipv6.c struct dccp_sock *dp; dp 115 net/dccp/ipv6.c dp = dccp_sk(sk); dp 117 
net/dccp/ipv6.c !between48(seq, dp->dccps_awl, dp->dccps_awh)) { dp 810 net/dccp/ipv6.c struct dccp_sock *dp = dccp_sk(sk); dp 818 net/dccp/ipv6.c dp->dccps_role = DCCP_ROLE_CLIENT; dp 943 net/dccp/ipv6.c dp->dccps_iss = secure_dccpv6_sequence_number(np->saddr.s6_addr32, dp 254 net/dccp/minisocks.c struct dccp_sock const *dp, struct sk_buff const *skb) dp 265 net/dccp/minisocks.c return dccp_feat_clone_list(&dp->dccps_featneg, &dreq->dreq_featneg); dp 50 net/dccp/options.c struct dccp_sock *dp = dccp_sk(sk); dp 57 net/dccp/options.c struct dccp_options_received *opt_recv = &dp->dccps_options_received; dp 148 net/dccp/options.c dp->dccps_timestamp_echo = ntohl(opt_val); dp 149 net/dccp/options.c dp->dccps_timestamp_time = dccp_timestamp(); dp 214 net/dccp/options.c if (ccid_hc_rx_parse_options(dp->dccps_hc_rx_ccid, sk, dp 229 net/dccp/options.c if (ccid_hc_tx_parse_options(dp->dccps_hc_tx_ccid, sk, dp 310 net/dccp/options.c struct dccp_sock *dp = dccp_sk(sk); dp 311 net/dccp/options.c u64 ndp = dp->dccps_ndp_count; dp 314 net/dccp/options.c ++dp->dccps_ndp_count; dp 316 net/dccp/options.c dp->dccps_ndp_count = 0; dp 351 net/dccp/options.c static int dccp_insert_option_timestamp_echo(struct dccp_sock *dp, dp 364 net/dccp/options.c elapsed_time = dccp_timestamp() - dp->dccps_timestamp_time; dp 365 net/dccp/options.c tstamp_echo = htonl(dp->dccps_timestamp_echo); dp 366 net/dccp/options.c dp->dccps_timestamp_echo = 0; dp 397 net/dccp/options.c struct dccp_sock *dp = dccp_sk(sk); dp 398 net/dccp/options.c struct dccp_ackvec *av = dp->dccps_hc_rx_ackvec; dp 419 net/dccp/options.c len + dcb->dccpd_opt_len + skb->len > dp->dccps_mss_cache) { dp 422 net/dccp/options.c dcb->dccpd_opt_len, dp->dccps_mss_cache); dp 423 net/dccp/options.c dp->dccps_sync_scheduled = 1; dp 547 net/dccp/options.c struct dccp_sock *dp = dccp_sk(sk); dp 551 net/dccp/options.c if (dp->dccps_send_ndp_count && dccp_insert_option_ndp(sk, skb)) dp 557 net/dccp/options.c if (dccp_feat_insert_opts(dp, NULL, skb)) dp 574 net/dccp/options.c if (dp->dccps_hc_rx_insert_options) { dp 575 net/dccp/options.c if (ccid_hc_rx_insert_options(dp->dccps_hc_rx_ccid, sk, skb)) dp 577 net/dccp/options.c dp->dccps_hc_rx_insert_options = 0; dp 580 net/dccp/options.c if (dp->dccps_timestamp_echo != 0 && dp 581 net/dccp/options.c dccp_insert_option_timestamp_echo(dp, NULL, skb)) dp 47 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 55 net/dccp/output.c u64 ackno = dp->dccps_gsr; dp 60 net/dccp/output.c dcb->dccpd_seq = ADD48(dp->dccps_gss, 1); dp 74 net/dccp/output.c dcb->dccpd_seq = dp->dccps_iss; dp 106 net/dccp/output.c dh->dccph_cscov = dp->dccps_pcslen; dp 111 net/dccp/output.c dccp_hdr_set_seq(dh, dp->dccps_gss); dp 118 net/dccp/output.c dp->dccps_service; dp 123 net/dccp/output.c dp->dccps_awl = dp->dccps_iss; dp 150 net/dccp/output.c static u32 dccp_determine_ccmps(const struct dccp_sock *dp) dp 152 net/dccp/output.c const struct ccid *tx_ccid = dp->dccps_hc_tx_ccid; dp 162 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 163 net/dccp/output.c u32 ccmps = dccp_determine_ccmps(dp); dp 183 net/dccp/output.c cur_mps -= roundup(1 + 6 + 10 + dp->dccps_send_ndp_count * 8 + 6 + dp 184 net/dccp/output.c (dp->dccps_hc_rx_ackvec ? 
DCCPAV_MIN_OPTLEN : 0), 4); dp 188 net/dccp/output.c dp->dccps_mss_cache = cur_mps; dp 244 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 252 net/dccp/output.c const u32 cur_mps = dp->dccps_mss_cache - DCCP_FEATNEG_OVERHEAD; dp 260 net/dccp/output.c if (!list_empty(&dp->dccps_featneg) && len > cur_mps) { dp 263 net/dccp/output.c dccp_feat_list_purge(&dp->dccps_featneg); dp 285 net/dccp/output.c ccid_hc_tx_packet_sent(dp->dccps_hc_tx_ccid, sk, len); dp 293 net/dccp/output.c if (dp->dccps_sync_scheduled) dp 294 net/dccp/output.c dccp_send_sync(sk, dp->dccps_gsr, DCCP_PKT_SYNC); dp 306 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 311 net/dccp/output.c rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb); dp 345 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 349 net/dccp/output.c int rc = ccid_hc_tx_send_packet(dp->dccps_hc_tx_ccid, sk, skb); dp 355 net/dccp/output.c sk_reset_timer(sk, &dp->dccps_xmit_timer, dp 535 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 549 net/dccp/output.c dp->dccps_gar = dp->dccps_iss; dp 669 net/dccp/output.c struct dccp_sock *dp = dccp_sk(sk); dp 679 net/dccp/output.c if (dp->dccps_role == DCCP_ROLE_SERVER && !dp->dccps_server_timewait) dp 176 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 178 net/dccp/proto.c ccid_hc_tx_delete(dp->dccps_hc_tx_ccid, sk); dp 179 net/dccp/proto.c dp->dccps_hc_tx_ccid = NULL; dp 185 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 194 net/dccp/proto.c dp->dccps_mss_cache = 536; dp 195 net/dccp/proto.c dp->dccps_rate_last = jiffies; dp 196 net/dccp/proto.c dp->dccps_role = DCCP_ROLE_UNDEFINED; dp 197 net/dccp/proto.c dp->dccps_service = DCCP_SERVICE_CODE_IS_ABSENT; dp 198 net/dccp/proto.c dp->dccps_tx_qlen = sysctl_dccp_tx_qlen; dp 202 net/dccp/proto.c INIT_LIST_HEAD(&dp->dccps_featneg); dp 213 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 225 net/dccp/proto.c kfree(dp->dccps_service_list); dp 226 net/dccp/proto.c dp->dccps_service_list = NULL; dp 228 net/dccp/proto.c if (dp->dccps_hc_rx_ackvec != NULL) { dp 229 net/dccp/proto.c dccp_ackvec_free(dp->dccps_hc_rx_ackvec); dp 230 net/dccp/proto.c dp->dccps_hc_rx_ackvec = NULL; dp 232 net/dccp/proto.c ccid_hc_rx_delete(dp->dccps_hc_rx_ccid, sk); dp 233 net/dccp/proto.c dp->dccps_hc_rx_ccid = NULL; dp 236 net/dccp/proto.c dccp_feat_list_purge(&dp->dccps_featneg); dp 243 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 245 net/dccp/proto.c dp->dccps_role = DCCP_ROLE_LISTEN; dp 247 net/dccp/proto.c if (dccp_feat_finalise_settings(dp)) dp 262 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 281 net/dccp/proto.c ccid_hc_rx_delete(dp->dccps_hc_rx_ccid, sk); dp 282 net/dccp/proto.c dp->dccps_hc_rx_ccid = NULL; dp 407 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 430 net/dccp/proto.c dp->dccps_service = service; dp 432 net/dccp/proto.c kfree(dp->dccps_service_list); dp 434 net/dccp/proto.c dp->dccps_service_list = sl; dp 503 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 532 net/dccp/proto.c if (dp->dccps_role != DCCP_ROLE_SERVER) dp 535 net/dccp/proto.c dp->dccps_server_timewait = (val != 0); dp 549 net/dccp/proto.c dp->dccps_qpolicy = val; dp 555 net/dccp/proto.c dp->dccps_tx_qlen = val; dp 595 net/dccp/proto.c const struct dccp_sock *dp = dccp_sk(sk); dp 600 net/dccp/proto.c if ((sl = dp->dccps_service_list) != NULL) { dp 611 net/dccp/proto.c put_user(dp->dccps_service, optval) || dp 622 net/dccp/proto.c struct dccp_sock *dp; dp 631 net/dccp/proto.c dp = dccp_sk(sk); dp 641 
net/dccp/proto.c val = dp->dccps_mss_cache; dp 646 net/dccp/proto.c val = ccid_get_current_tx_ccid(dp); dp 651 net/dccp/proto.c val = ccid_get_current_rx_ccid(dp); dp 656 net/dccp/proto.c val = dp->dccps_server_timewait; dp 659 net/dccp/proto.c val = dp->dccps_pcslen; dp 662 net/dccp/proto.c val = dp->dccps_pcrlen; dp 665 net/dccp/proto.c val = dp->dccps_qpolicy; dp 668 net/dccp/proto.c val = dp->dccps_tx_qlen; dp 671 net/dccp/proto.c return ccid_hc_rx_getsockopt(dp->dccps_hc_rx_ccid, sk, optname, dp 674 net/dccp/proto.c return ccid_hc_tx_getsockopt(dp->dccps_hc_tx_ccid, sk, optname, dp 754 net/dccp/proto.c const struct dccp_sock *dp = dccp_sk(sk); dp 763 net/dccp/proto.c if (len > dp->dccps_mss_cache) dp 811 net/dccp/proto.c if (!timer_pending(&dp->dccps_xmit_timer)) dp 998 net/dccp/proto.c struct dccp_sock *dp = dccp_sk(sk); dp 1016 net/dccp/proto.c sk_stop_timer(sk, &dp->dccps_xmit_timer); dp 236 net/dccp/timer.c struct dccp_sock *dp = from_timer(dp, t, dccps_xmit_timer); dp 237 net/dccp/timer.c struct sock *sk = &dp->dccps_inet_connection.icsk_inet.sk; dp 244 net/dccp/timer.c struct dccp_sock *dp = dccp_sk(sk); dp 246 net/dccp/timer.c tasklet_init(&dp->dccps_xmitlet, dccp_write_xmitlet, (unsigned long)sk); dp 247 net/dccp/timer.c timer_setup(&dp->dccps_xmit_timer, dccp_write_xmit_timer, 0); dp 182 net/dsa/dsa.c struct dsa_switch *ds = p->dp->ds; dp 198 net/dsa/dsa.c return ds->ops->port_rxtstamp(ds, p->dp->index, skb, type); dp 106 net/dsa/dsa2.c static bool dsa_port_is_user(struct dsa_port *dp) dp 108 net/dsa/dsa2.c return dp->type == DSA_PORT_TYPE_USER; dp 115 net/dsa/dsa2.c struct dsa_port *dp; dp 124 net/dsa/dsa2.c dp = &ds->ports[port]; dp 126 net/dsa/dsa2.c if (dp->dn == dn) dp 127 net/dsa/dsa2.c return dp; dp 134 net/dsa/dsa2.c static bool dsa_port_setup_routing_table(struct dsa_port *dp) dp 136 net/dsa/dsa2.c struct dsa_switch *ds = dp->ds; dp 138 net/dsa/dsa2.c struct device_node *dn = dp->dn; dp 150 net/dsa/dsa2.c ds->rtable[link_dp->ds->index] = dp->index; dp 159 net/dsa/dsa2.c struct dsa_port *dp; dp 166 net/dsa/dsa2.c dp = &ds->ports[i]; dp 168 net/dsa/dsa2.c if (dsa_port_is_dsa(dp)) { dp 169 net/dsa/dsa2.c complete = dsa_port_setup_routing_table(dp); dp 200 net/dsa/dsa2.c struct dsa_port *dp; dp 209 net/dsa/dsa2.c dp = &ds->ports[port]; dp 211 net/dsa/dsa2.c if (dsa_port_is_cpu(dp)) dp 212 net/dsa/dsa2.c return dp; dp 222 net/dsa/dsa2.c struct dsa_port *dp; dp 239 net/dsa/dsa2.c dp = &ds->ports[port]; dp 241 net/dsa/dsa2.c if (dsa_port_is_user(dp) || dsa_port_is_dsa(dp)) dp 242 net/dsa/dsa2.c dp->cpu_dp = dst->cpu_dp; dp 255 net/dsa/dsa2.c static int dsa_port_setup(struct dsa_port *dp) dp 257 net/dsa/dsa2.c struct dsa_switch *ds = dp->ds; dp 261 net/dsa/dsa2.c struct devlink_port *dlp = &dp->devlink_port; dp 268 net/dsa/dsa2.c switch (dp->type) { dp 270 net/dsa/dsa2.c dsa_port_disable(dp); dp 275 net/dsa/dsa2.c dp->index, false, 0, id, len); dp 276 net/dsa/dsa2.c err = devlink_port_register(dl, dlp, dp->index); dp 281 net/dsa/dsa2.c err = dsa_port_link_register_of(dp); dp 286 net/dsa/dsa2.c err = dsa_port_enable(dp, NULL); dp 295 net/dsa/dsa2.c dp->index, false, 0, id, len); dp 296 net/dsa/dsa2.c err = devlink_port_register(dl, dlp, dp->index); dp 301 net/dsa/dsa2.c err = dsa_port_link_register_of(dp); dp 306 net/dsa/dsa2.c err = dsa_port_enable(dp, NULL); dp 315 net/dsa/dsa2.c dp->index, false, 0, id, len); dp 316 net/dsa/dsa2.c err = devlink_port_register(dl, dlp, dp->index); dp 321 net/dsa/dsa2.c dp->mac = of_get_mac_address(dp->dn); dp 322 net/dsa/dsa2.c err = 
dsa_slave_create(dp); dp 326 net/dsa/dsa2.c devlink_port_type_eth_set(dlp, dp->slave); dp 331 net/dsa/dsa2.c dsa_port_disable(dp); dp 333 net/dsa/dsa2.c dsa_port_link_unregister_of(dp); dp 340 net/dsa/dsa2.c static void dsa_port_teardown(struct dsa_port *dp) dp 342 net/dsa/dsa2.c struct devlink_port *dlp = &dp->devlink_port; dp 344 net/dsa/dsa2.c switch (dp->type) { dp 348 net/dsa/dsa2.c dsa_port_disable(dp); dp 349 net/dsa/dsa2.c dsa_tag_driver_put(dp->tag_ops); dp 351 net/dsa/dsa2.c dsa_port_link_unregister_of(dp); dp 354 net/dsa/dsa2.c dsa_port_disable(dp); dp 356 net/dsa/dsa2.c dsa_port_link_unregister_of(dp); dp 360 net/dsa/dsa2.c if (dp->slave) { dp 361 net/dsa/dsa2.c dsa_slave_destroy(dp->slave); dp 362 net/dsa/dsa2.c dp->slave = NULL; dp 446 net/dsa/dsa2.c struct dsa_port *dp; dp 460 net/dsa/dsa2.c dp = &ds->ports[port]; dp 462 net/dsa/dsa2.c err = dsa_port_setup(dp); dp 477 net/dsa/dsa2.c dp = &ds->ports[port]; dp 479 net/dsa/dsa2.c dsa_port_teardown(dp); dp 491 net/dsa/dsa2.c struct dsa_port *dp; dp 500 net/dsa/dsa2.c dp = &ds->ports[port]; dp 502 net/dsa/dsa2.c dsa_port_teardown(dp); dp 613 net/dsa/dsa2.c static int dsa_port_parse_user(struct dsa_port *dp, const char *name) dp 618 net/dsa/dsa2.c dp->type = DSA_PORT_TYPE_USER; dp 619 net/dsa/dsa2.c dp->name = name; dp 624 net/dsa/dsa2.c static int dsa_port_parse_dsa(struct dsa_port *dp) dp 626 net/dsa/dsa2.c dp->type = DSA_PORT_TYPE_DSA; dp 631 net/dsa/dsa2.c static int dsa_port_parse_cpu(struct dsa_port *dp, struct net_device *master) dp 633 net/dsa/dsa2.c struct dsa_switch *ds = dp->ds; dp 638 net/dsa/dsa2.c tag_protocol = ds->ops->get_tag_protocol(ds, dp->index); dp 647 net/dsa/dsa2.c dp->type = DSA_PORT_TYPE_CPU; dp 648 net/dsa/dsa2.c dp->filter = tag_ops->filter; dp 649 net/dsa/dsa2.c dp->rcv = tag_ops->rcv; dp 650 net/dsa/dsa2.c dp->tag_ops = tag_ops; dp 651 net/dsa/dsa2.c dp->master = master; dp 652 net/dsa/dsa2.c dp->dst = dst; dp 657 net/dsa/dsa2.c static int dsa_port_parse_of(struct dsa_port *dp, struct device_node *dn) dp 663 net/dsa/dsa2.c dp->dn = dn; dp 672 net/dsa/dsa2.c return dsa_port_parse_cpu(dp, master); dp 676 net/dsa/dsa2.c return dsa_port_parse_dsa(dp); dp 678 net/dsa/dsa2.c return dsa_port_parse_user(dp, name); dp 685 net/dsa/dsa2.c struct dsa_port *dp; dp 705 net/dsa/dsa2.c dp = &ds->ports[reg]; dp 707 net/dsa/dsa2.c err = dsa_port_parse_of(dp, port); dp 750 net/dsa/dsa2.c static int dsa_port_parse(struct dsa_port *dp, const char *name, dp 762 net/dsa/dsa2.c return dsa_port_parse_cpu(dp, master); dp 766 net/dsa/dsa2.c return dsa_port_parse_dsa(dp); dp 768 net/dsa/dsa2.c return dsa_port_parse_user(dp, name); dp 775 net/dsa/dsa2.c struct dsa_port *dp; dp 784 net/dsa/dsa2.c dp = &ds->ports[i]; dp 789 net/dsa/dsa2.c err = dsa_port_parse(dp, name, dev); dp 72 net/dsa/dsa_priv.h struct dsa_port *dp; dp 129 net/dsa/dsa_priv.h int dsa_port_set_state(struct dsa_port *dp, u8 state, dp 131 net/dsa/dsa_priv.h int dsa_port_enable_rt(struct dsa_port *dp, struct phy_device *phy); dp 132 net/dsa/dsa_priv.h int dsa_port_enable(struct dsa_port *dp, struct phy_device *phy); dp 133 net/dsa/dsa_priv.h void dsa_port_disable_rt(struct dsa_port *dp); dp 134 net/dsa/dsa_priv.h void dsa_port_disable(struct dsa_port *dp); dp 135 net/dsa/dsa_priv.h int dsa_port_bridge_join(struct dsa_port *dp, struct net_device *br); dp 136 net/dsa/dsa_priv.h void dsa_port_bridge_leave(struct dsa_port *dp, struct net_device *br); dp 137 net/dsa/dsa_priv.h int dsa_port_vlan_filtering(struct dsa_port *dp, bool vlan_filtering, dp 139 net/dsa/dsa_priv.h 
int dsa_port_ageing_time(struct dsa_port *dp, clock_t ageing_clock, dp 141 net/dsa/dsa_priv.h int dsa_port_fdb_add(struct dsa_port *dp, const unsigned char *addr, dp 143 net/dsa/dsa_priv.h int dsa_port_fdb_del(struct dsa_port *dp, const unsigned char *addr, dp 145 net/dsa/dsa_priv.h int dsa_port_fdb_dump(struct dsa_port *dp, dsa_fdb_dump_cb_t *cb, void *data); dp 146 net/dsa/dsa_priv.h int dsa_port_mdb_add(const struct dsa_port *dp, dp 149 net/dsa/dsa_priv.h int dsa_port_mdb_del(const struct dsa_port *dp, dp 151 net/dsa/dsa_priv.h int dsa_port_pre_bridge_flags(const struct dsa_port *dp, unsigned long flags, dp 153 net/dsa/dsa_priv.h int dsa_port_bridge_flags(const struct dsa_port *dp, unsigned long flags, dp 155 net/dsa/dsa_priv.h int dsa_port_mrouter(struct dsa_port *dp, bool mrouter, dp 157 net/dsa/dsa_priv.h int dsa_port_vlan_add(struct dsa_port *dp, dp 160 net/dsa/dsa_priv.h int dsa_port_vlan_del(struct dsa_port *dp, dp 162 net/dsa/dsa_priv.h int dsa_port_vid_add(struct dsa_port *dp, u16 vid, u16 flags); dp 163 net/dsa/dsa_priv.h int dsa_port_vid_del(struct dsa_port *dp, u16 vid); dp 164 net/dsa/dsa_priv.h int dsa_port_link_register_of(struct dsa_port *dp); dp 165 net/dsa/dsa_priv.h void dsa_port_link_unregister_of(struct dsa_port *dp); dp 187 net/dsa/dsa_priv.h int dsa_slave_create(struct dsa_port *dp); dp 200 net/dsa/dsa_priv.h return p->dp; dp 206 net/dsa/dsa_priv.h struct dsa_port *dp = dsa_slave_to_port(dev); dp 208 net/dsa/dsa_priv.h return dp->cpu_dp->master; dp 16 net/dsa/port.c static int dsa_port_notify(const struct dsa_port *dp, unsigned long e, void *v) dp 18 net/dsa/port.c struct raw_notifier_head *nh = &dp->ds->dst->nh; dp 26 net/dsa/port.c int dsa_port_set_state(struct dsa_port *dp, u8 state, dp 29 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 30 net/dsa/port.c int port = dp->index; dp 44 net/dsa/port.c if ((dp->stp_state == BR_STATE_LEARNING || dp 45 net/dsa/port.c dp->stp_state == BR_STATE_FORWARDING) && dp 52 net/dsa/port.c dp->stp_state = state; dp 57 net/dsa/port.c static void dsa_port_set_state_now(struct dsa_port *dp, u8 state) dp 61 net/dsa/port.c err = dsa_port_set_state(dp, state, NULL); dp 66 net/dsa/port.c int dsa_port_enable_rt(struct dsa_port *dp, struct phy_device *phy) dp 68 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 69 net/dsa/port.c int port = dp->index; dp 78 net/dsa/port.c if (!dp->bridge_dev) dp 79 net/dsa/port.c dsa_port_set_state_now(dp, BR_STATE_FORWARDING); dp 81 net/dsa/port.c if (dp->pl) dp 82 net/dsa/port.c phylink_start(dp->pl); dp 87 net/dsa/port.c int dsa_port_enable(struct dsa_port *dp, struct phy_device *phy) dp 92 net/dsa/port.c err = dsa_port_enable_rt(dp, phy); dp 98 net/dsa/port.c void dsa_port_disable_rt(struct dsa_port *dp) dp 100 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 101 net/dsa/port.c int port = dp->index; dp 103 net/dsa/port.c if (dp->pl) dp 104 net/dsa/port.c phylink_stop(dp->pl); dp 106 net/dsa/port.c if (!dp->bridge_dev) dp 107 net/dsa/port.c dsa_port_set_state_now(dp, BR_STATE_DISABLED); dp 113 net/dsa/port.c void dsa_port_disable(struct dsa_port *dp) dp 116 net/dsa/port.c dsa_port_disable_rt(dp); dp 120 net/dsa/port.c int dsa_port_bridge_join(struct dsa_port *dp, struct net_device *br) dp 123 net/dsa/port.c .sw_index = dp->ds->index, dp 124 net/dsa/port.c .port = dp->index, dp 130 net/dsa/port.c err = dsa_port_bridge_flags(dp, BR_FLOOD | BR_MCAST_FLOOD, NULL); dp 137 net/dsa/port.c dp->bridge_dev = br; dp 139 net/dsa/port.c err = dsa_port_notify(dp, DSA_NOTIFIER_BRIDGE_JOIN, &info); dp 143 
net/dsa/port.c dsa_port_bridge_flags(dp, 0, NULL); dp 144 net/dsa/port.c dp->bridge_dev = NULL; dp 150 net/dsa/port.c void dsa_port_bridge_leave(struct dsa_port *dp, struct net_device *br) dp 153 net/dsa/port.c .sw_index = dp->ds->index, dp 154 net/dsa/port.c .port = dp->index, dp 162 net/dsa/port.c dp->bridge_dev = NULL; dp 164 net/dsa/port.c err = dsa_port_notify(dp, DSA_NOTIFIER_BRIDGE_LEAVE, &info); dp 169 net/dsa/port.c dsa_port_bridge_flags(dp, 0, NULL); dp 174 net/dsa/port.c dsa_port_set_state_now(dp, BR_STATE_FORWARDING); dp 177 net/dsa/port.c static bool dsa_port_can_apply_vlan_filtering(struct dsa_port *dp, dp 180 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 200 net/dsa/port.c if (other_bridge == dp->bridge_dev) dp 210 net/dsa/port.c int dsa_port_vlan_filtering(struct dsa_port *dp, bool vlan_filtering, dp 213 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 223 net/dsa/port.c if (!dsa_port_can_apply_vlan_filtering(dp, vlan_filtering)) dp 226 net/dsa/port.c if (dsa_port_is_vlan_filtering(dp) == vlan_filtering) dp 229 net/dsa/port.c err = ds->ops->port_vlan_filtering(ds, dp->index, dp 237 net/dsa/port.c dp->vlan_filtering = vlan_filtering; dp 241 net/dsa/port.c int dsa_port_ageing_time(struct dsa_port *dp, clock_t ageing_clock, dp 252 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_AGEING_TIME, &info); dp 254 net/dsa/port.c dp->ageing_time = ageing_time; dp 256 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_AGEING_TIME, &info); dp 259 net/dsa/port.c int dsa_port_pre_bridge_flags(const struct dsa_port *dp, unsigned long flags, dp 262 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 271 net/dsa/port.c int dsa_port_bridge_flags(const struct dsa_port *dp, unsigned long flags, dp 274 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 275 net/dsa/port.c int port = dp->index; dp 288 net/dsa/port.c int dsa_port_mrouter(struct dsa_port *dp, bool mrouter, dp 291 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 292 net/dsa/port.c int port = dp->index; dp 300 net/dsa/port.c int dsa_port_fdb_add(struct dsa_port *dp, const unsigned char *addr, dp 304 net/dsa/port.c .sw_index = dp->ds->index, dp 305 net/dsa/port.c .port = dp->index, dp 310 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_FDB_ADD, &info); dp 313 net/dsa/port.c int dsa_port_fdb_del(struct dsa_port *dp, const unsigned char *addr, dp 317 net/dsa/port.c .sw_index = dp->ds->index, dp 318 net/dsa/port.c .port = dp->index, dp 324 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_FDB_DEL, &info); dp 327 net/dsa/port.c int dsa_port_fdb_dump(struct dsa_port *dp, dsa_fdb_dump_cb_t *cb, void *data) dp 329 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 330 net/dsa/port.c int port = dp->index; dp 338 net/dsa/port.c int dsa_port_mdb_add(const struct dsa_port *dp, dp 343 net/dsa/port.c .sw_index = dp->ds->index, dp 344 net/dsa/port.c .port = dp->index, dp 349 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_MDB_ADD, &info); dp 352 net/dsa/port.c int dsa_port_mdb_del(const struct dsa_port *dp, dp 356 net/dsa/port.c .sw_index = dp->ds->index, dp 357 net/dsa/port.c .port = dp->index, dp 361 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_MDB_DEL, &info); dp 364 net/dsa/port.c int dsa_port_vlan_add(struct dsa_port *dp, dp 369 net/dsa/port.c .sw_index = dp->ds->index, dp 370 net/dsa/port.c .port = dp->index, dp 375 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_VLAN_ADD, &info); dp 378 net/dsa/port.c int dsa_port_vlan_del(struct dsa_port *dp, dp 382 net/dsa/port.c .sw_index = dp->ds->index, dp 
383 net/dsa/port.c .port = dp->index, dp 387 net/dsa/port.c return dsa_port_notify(dp, DSA_NOTIFIER_VLAN_DEL, &info); dp 390 net/dsa/port.c int dsa_port_vid_add(struct dsa_port *dp, u16 vid, u16 flags) dp 402 net/dsa/port.c err = dsa_port_vlan_add(dp, &vlan, &trans); dp 407 net/dsa/port.c return dsa_port_vlan_add(dp, &vlan, &trans); dp 411 net/dsa/port.c int dsa_port_vid_del(struct dsa_port *dp, u16 vid) dp 419 net/dsa/port.c return dsa_port_vlan_del(dp, &vlan); dp 423 net/dsa/port.c static struct phy_device *dsa_port_get_phy_device(struct dsa_port *dp) dp 428 net/dsa/port.c phy_dn = of_parse_phandle(dp->dn, "phy-handle", 0); dp 446 net/dsa/port.c struct dsa_port *dp = container_of(config, struct dsa_port, pl_config); dp 447 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 452 net/dsa/port.c ds->ops->phylink_validate(ds, dp->index, supported, state); dp 459 net/dsa/port.c struct dsa_port *dp = container_of(config, struct dsa_port, pl_config); dp 460 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 466 net/dsa/port.c return ds->ops->phylink_mac_link_state(ds, dp->index, state); dp 474 net/dsa/port.c struct dsa_port *dp = container_of(config, struct dsa_port, pl_config); dp 475 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 480 net/dsa/port.c ds->ops->phylink_mac_config(ds, dp->index, mode, state); dp 486 net/dsa/port.c struct dsa_port *dp = container_of(config, struct dsa_port, pl_config); dp 487 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 492 net/dsa/port.c ds->ops->phylink_mac_an_restart(ds, dp->index); dp 500 net/dsa/port.c struct dsa_port *dp = container_of(config, struct dsa_port, pl_config); dp 502 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 504 net/dsa/port.c if (dsa_is_user_port(ds, dp->index)) dp 505 net/dsa/port.c phydev = dp->slave->phydev; dp 509 net/dsa/port.c ds->ops->adjust_link(ds, dp->index, phydev); dp 513 net/dsa/port.c ds->ops->phylink_mac_link_down(ds, dp->index, mode, interface); dp 522 net/dsa/port.c struct dsa_port *dp = container_of(config, struct dsa_port, pl_config); dp 523 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 527 net/dsa/port.c ds->ops->adjust_link(ds, dp->index, phydev); dp 531 net/dsa/port.c ds->ops->phylink_mac_link_up(ds, dp->index, mode, interface, phydev); dp 544 net/dsa/port.c static int dsa_port_setup_phy_of(struct dsa_port *dp, bool enable) dp 546 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 548 net/dsa/port.c int port = dp->index; dp 551 net/dsa/port.c phydev = dsa_port_get_phy_device(dp); dp 582 net/dsa/port.c static int dsa_port_fixed_link_register_of(struct dsa_port *dp) dp 584 net/dsa/port.c struct device_node *dn = dp->dn; dp 585 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 587 net/dsa/port.c int port = dp->index; dp 616 net/dsa/port.c static int dsa_port_phylink_register(struct dsa_port *dp) dp 618 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 619 net/dsa/port.c struct device_node *port_dn = dp->dn; dp 626 net/dsa/port.c dp->pl_config.dev = ds->dev; dp 627 net/dsa/port.c dp->pl_config.type = PHYLINK_DEV; dp 629 net/dsa/port.c dp->pl = phylink_create(&dp->pl_config, of_fwnode_handle(port_dn), dp 631 net/dsa/port.c if (IS_ERR(dp->pl)) { dp 632 net/dsa/port.c pr_err("error creating PHYLINK: %ld\n", PTR_ERR(dp->pl)); dp 633 net/dsa/port.c return PTR_ERR(dp->pl); dp 636 net/dsa/port.c err = phylink_of_phy_connect(dp->pl, port_dn, 0); dp 645 net/dsa/port.c phylink_destroy(dp->pl); dp 649 net/dsa/port.c int dsa_port_link_register_of(struct dsa_port *dp) dp 651 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 
655 net/dsa/port.c phy_np = of_parse_phandle(dp->dn, "phy-handle", 0); dp 656 net/dsa/port.c if (of_phy_is_fixed_link(dp->dn) || phy_np) dp 657 net/dsa/port.c return dsa_port_phylink_register(dp); dp 664 net/dsa/port.c if (of_phy_is_fixed_link(dp->dn)) dp 665 net/dsa/port.c return dsa_port_fixed_link_register_of(dp); dp 667 net/dsa/port.c return dsa_port_setup_phy_of(dp, true); dp 670 net/dsa/port.c void dsa_port_link_unregister_of(struct dsa_port *dp) dp 672 net/dsa/port.c struct dsa_switch *ds = dp->ds; dp 674 net/dsa/port.c if (!ds->ops->adjust_link && dp->pl) { dp 676 net/dsa/port.c phylink_disconnect_phy(dp->pl); dp 678 net/dsa/port.c phylink_destroy(dp->pl); dp 679 net/dsa/port.c dp->pl = NULL; dp 683 net/dsa/port.c if (of_phy_is_fixed_link(dp->dn)) dp 684 net/dsa/port.c of_phy_deregister_fixed_link(dp->dn); dp 686 net/dsa/port.c dsa_port_setup_phy_of(dp, false); dp 689 net/dsa/port.c int dsa_port_get_phy_strings(struct dsa_port *dp, uint8_t *data) dp 694 net/dsa/port.c if (of_phy_is_fixed_link(dp->dn)) dp 697 net/dsa/port.c phydev = dsa_port_get_phy_device(dp); dp 708 net/dsa/port.c int dsa_port_get_ethtool_phy_stats(struct dsa_port *dp, uint64_t *data) dp 713 net/dsa/port.c if (of_phy_is_fixed_link(dp->dn)) dp 716 net/dsa/port.c phydev = dsa_port_get_phy_device(dp); dp 727 net/dsa/port.c int dsa_port_get_phy_sset_count(struct dsa_port *dp) dp 732 net/dsa/port.c if (of_phy_is_fixed_link(dp->dn)) dp 735 net/dsa/port.c phydev = dsa_port_get_phy_device(dp); dp 70 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 93 net/dsa/slave.c err = dsa_port_enable_rt(dp, dev->phydev); dp 115 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 117 net/dsa/slave.c cancel_work_sync(&dp->xmit_work); dp 118 net/dsa/slave.c skb_queue_purge(&dp->xmit_queue); dp 120 net/dsa/slave.c dsa_port_disable_rt(dp); dp 239 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 248 net/dsa/slave.c err = dsa_port_fdb_dump(dp, dsa_slave_port_fdb_do_dump, &dump); dp 257 net/dsa/slave.c struct dsa_switch *ds = p->dp->ds; dp 258 net/dsa/slave.c int port = p->dp->index; dp 272 net/dsa/slave.c return phylink_mii_ioctl(p->dp->pl, ifr, cmd); dp 279 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 284 net/dsa/slave.c ret = dsa_port_set_state(dp, attr->u.stp_state, trans); dp 287 net/dsa/slave.c ret = dsa_port_vlan_filtering(dp, attr->u.vlan_filtering, dp 291 net/dsa/slave.c ret = dsa_port_ageing_time(dp, attr->u.ageing_time, trans); dp 294 net/dsa/slave.c ret = dsa_port_pre_bridge_flags(dp, attr->u.brport_flags, dp 298 net/dsa/slave.c ret = dsa_port_bridge_flags(dp, attr->u.brport_flags, trans); dp 301 net/dsa/slave.c ret = dsa_port_mrouter(dp->cpu_dp, attr->u.mrouter, trans); dp 315 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 322 net/dsa/slave.c if (dp->bridge_dev && !br_vlan_enabled(dp->bridge_dev)) dp 327 net/dsa/slave.c err = dsa_port_vlan_add(dp, &vlan, trans); dp 337 net/dsa/slave.c err = dsa_port_vlan_add(dp->cpu_dp, &vlan, trans); dp 349 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 361 net/dsa/slave.c err = dsa_port_mdb_add(dp, SWITCHDEV_OBJ_PORT_MDB(obj), trans); dp 367 net/dsa/slave.c err = dsa_port_mdb_add(dp->cpu_dp, SWITCHDEV_OBJ_PORT_MDB(obj), dp 384 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 389 net/dsa/slave.c if (dp->bridge_dev && !br_vlan_enabled(dp->bridge_dev)) dp 395 net/dsa/slave.c return dsa_port_vlan_del(dp, SWITCHDEV_OBJ_PORT_VLAN(obj)); dp 401 net/dsa/slave.c struct dsa_port *dp = 
dsa_slave_to_port(dev); dp 408 net/dsa/slave.c err = dsa_port_mdb_del(dp, SWITCHDEV_OBJ_PORT_MDB(obj)); dp 414 net/dsa/slave.c err = dsa_port_mdb_del(dp->cpu_dp, SWITCHDEV_OBJ_PORT_MDB(obj)); dp 430 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 431 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 438 net/dsa/slave.c if (dp->ds->devlink) dp 464 net/dsa/slave.c struct dsa_switch *ds = p->dp->ds; dp 481 net/dsa/slave.c if (ds->ops->port_txtstamp(ds, p->dp->index, clone, type)) dp 540 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 544 net/dsa/slave.c skb_queue_tail(&dp->xmit_queue, skb); dp 545 net/dsa/slave.c schedule_work(&dp->xmit_work); dp 552 net/dsa/slave.c struct dsa_port *dp = container_of(work, struct dsa_port, xmit_work); dp 553 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 559 net/dsa/slave.c while ((skb = skb_dequeue(&dp->xmit_queue)) != NULL) dp 560 net/dsa/slave.c ds->ops->port_deferred_xmit(ds, dp->index, skb); dp 575 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 576 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 579 net/dsa/slave.c return ds->ops->get_regs_len(ds, dp->index); dp 587 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 588 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 591 net/dsa/slave.c ds->ops->get_regs(ds, dp->index, regs, _p); dp 596 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 598 net/dsa/slave.c return phylink_ethtool_nway_reset(dp->pl); dp 603 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 604 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 618 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 619 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 630 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 631 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 642 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 643 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 653 net/dsa/slave.c ds->ops->get_strings(ds, dp->index, stringset, dp 662 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 664 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 686 net/dsa/slave.c ds->ops->get_ethtool_stats(ds, dp->index, data + 4); dp 691 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 692 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 699 net/dsa/slave.c count += ds->ops->get_sset_count(ds, dp->index, sset); dp 709 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 710 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 712 net/dsa/slave.c phylink_ethtool_get_wol(dp->pl, w); dp 715 net/dsa/slave.c ds->ops->get_wol(ds, dp->index, w); dp 720 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 721 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 724 net/dsa/slave.c phylink_ethtool_set_wol(dp->pl, w); dp 727 net/dsa/slave.c ret = ds->ops->set_wol(ds, dp->index, w); dp 734 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 735 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 739 net/dsa/slave.c if (!dev->phydev || !dp->pl) dp 745 net/dsa/slave.c ret = ds->ops->set_mac_eee(ds, dp->index, e); dp 749 net/dsa/slave.c return phylink_ethtool_set_eee(dp->pl, e); dp 754 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 755 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 759 net/dsa/slave.c if (!dev->phydev || !dp->pl) dp 765 net/dsa/slave.c ret = ds->ops->get_mac_eee(ds, dp->index, e); dp 769 net/dsa/slave.c return 
phylink_ethtool_get_eee(dp->pl, e); dp 775 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 777 net/dsa/slave.c return phylink_ethtool_ksettings_get(dp->pl, cmd); dp 783 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 785 net/dsa/slave.c return phylink_ethtool_ksettings_set(dp->pl, cmd); dp 833 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 839 net/dsa/slave.c if (dp->ds->devlink) dp 842 net/dsa/slave.c if (snprintf(name, len, "p%d", dp->index) >= len) dp 865 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 869 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 904 net/dsa/slave.c err = ds->ops->port_mirror_add(ds, dp->index, mirror, ingress); dp 919 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 921 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 934 net/dsa/slave.c ds->ops->port_mirror_del(ds, dp->index, &mall_tc_entry->mirror); dp 1034 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1035 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 1043 net/dsa/slave.c return ds->ops->port_setup_tc(ds, dp->index, type, type_data); dp 1077 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1078 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 1083 net/dsa/slave.c return ds->ops->get_rxnfc(ds, dp->index, nfc, rule_locs); dp 1089 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1090 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 1095 net/dsa/slave.c return ds->ops->set_rxnfc(ds, dp->index, nfc); dp 1102 net/dsa/slave.c struct dsa_switch *ds = p->dp->ds; dp 1107 net/dsa/slave.c return ds->ops->get_ts_info(ds, p->dp->index, ts); dp 1113 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1120 net/dsa/slave.c if (dp->bridge_dev) { dp 1121 net/dsa/slave.c if (!br_vlan_enabled(dp->bridge_dev)) dp 1128 net/dsa/slave.c ret = br_vlan_get_info(dp->bridge_dev, vid, &info); dp 1133 net/dsa/slave.c ret = dsa_port_vid_add(dp, vid, 0); dp 1137 net/dsa/slave.c ret = dsa_port_vid_add(dp->cpu_dp, vid, 0); dp 1147 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1154 net/dsa/slave.c if (dp->bridge_dev) { dp 1155 net/dsa/slave.c if (!br_vlan_enabled(dp->bridge_dev)) dp 1162 net/dsa/slave.c ret = br_vlan_get_info(dp->bridge_dev, vid, &info); dp 1170 net/dsa/slave.c return dsa_port_vid_del(dp, vid); dp 1203 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1205 net/dsa/slave.c return dsa_port_fdb_add(dp, addr, vid); dp 1212 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1214 net/dsa/slave.c return dsa_port_fdb_del(dp, addr, vid); dp 1219 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1221 net/dsa/slave.c return dp->ds->devlink ? 
&dp->devlink_port : NULL; dp 1256 net/dsa/slave.c const struct dsa_port *dp = dsa_to_port(ds, port); dp 1258 net/dsa/slave.c phylink_mac_change(dp->pl, up); dp 1265 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1266 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 1271 net/dsa/slave.c ds->ops->phylink_fixed_state(ds, dp->index, state); dp 1277 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(slave_dev); dp 1278 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 1286 net/dsa/slave.c return phylink_connect_phy(dp->pl, slave_dev->phydev); dp 1291 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(slave_dev); dp 1292 net/dsa/slave.c struct device_node *port_dn = dp->dn; dp 1293 net/dsa/slave.c struct dsa_switch *ds = dp->ds; dp 1301 net/dsa/slave.c dp->pl_config.dev = &slave_dev->dev; dp 1302 net/dsa/slave.c dp->pl_config.type = PHYLINK_NETDEV; dp 1304 net/dsa/slave.c dp->pl = phylink_create(&dp->pl_config, of_fwnode_handle(port_dn), mode, dp 1306 net/dsa/slave.c if (IS_ERR(dp->pl)) { dp 1308 net/dsa/slave.c "error creating PHYLINK: %ld\n", PTR_ERR(dp->pl)); dp 1309 net/dsa/slave.c return PTR_ERR(dp->pl); dp 1317 net/dsa/slave.c phylink_fixed_state_cb(dp->pl, dsa_slave_phylink_fixed_state); dp 1320 net/dsa/slave.c phy_flags = ds->ops->get_phy_flags(ds, dp->index); dp 1322 net/dsa/slave.c ret = phylink_of_phy_connect(dp->pl, port_dn, phy_flags); dp 1327 net/dsa/slave.c ret = dsa_slave_phy_connect(slave_dev, dp->index); dp 1331 net/dsa/slave.c dp->index, ret); dp 1332 net/dsa/slave.c phylink_destroy(dp->pl); dp 1342 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(slave_dev); dp 1347 net/dsa/slave.c cancel_work_sync(&dp->xmit_work); dp 1348 net/dsa/slave.c skb_queue_purge(&dp->xmit_queue); dp 1353 net/dsa/slave.c phylink_stop(dp->pl); dp 1361 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(slave_dev); dp 1369 net/dsa/slave.c phylink_start(dp->pl); dp 1378 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1380 net/dsa/slave.c .switch_number = dp->ds->index, dp 1381 net/dsa/slave.c .port_number = dp->index, dp 1434 net/dsa/slave.c p->dp = port; dp 1462 net/dsa/slave.c phylink_disconnect_phy(p->dp->pl); dp 1464 net/dsa/slave.c phylink_destroy(p->dp->pl); dp 1474 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(slave_dev); dp 1479 net/dsa/slave.c phylink_disconnect_phy(dp->pl); dp 1484 net/dsa/slave.c phylink_destroy(dp->pl); dp 1497 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1502 net/dsa/slave.c err = dsa_port_bridge_join(dp, info->upper_dev); dp 1505 net/dsa/slave.c dsa_port_bridge_leave(dp, info->upper_dev); dp 1519 net/dsa/slave.c struct dsa_port *dp; dp 1530 net/dsa/slave.c dp = dsa_slave_to_port(slave); dp 1531 net/dsa/slave.c if (!dp->bridge_dev) dp 1535 net/dsa/slave.c if (br_vlan_enabled(dp->bridge_dev) && dp 1573 net/dsa/slave.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 1583 net/dsa/slave.c err = dsa_port_fdb_add(dp, fdb_info->addr, fdb_info->vid); dp 1598 net/dsa/slave.c err = dsa_port_fdb_del(dp, fdb_info->addr, fdb_info->vid); dp 23 net/dsa/switch.c struct dsa_port *dp = &ds->ports[i]; dp 25 net/dsa/switch.c if (dp->ageing_time && dp->ageing_time < ageing_time) dp 26 net/dsa/switch.c ageing_time = dp->ageing_time; dp 210 net/dsa/switch.c const struct dsa_port *dp = dsa_to_port(ds, port); dp 216 net/dsa/switch.c if (!dp->bridge_dev) dp 225 net/dsa/switch.c return vlan_for_each(dp->slave, dsa_port_vlan_device_check, dp 133 net/dsa/tag_8021q.c struct dsa_port *dp = &ds->ports[port]; dp 138 
net/dsa/tag_8021q.c return dsa_port_vid_add(dp, vid, flags); dp 140 net/dsa/tag_8021q.c err = dsa_port_vid_del(dp, vid); dp 152 net/dsa/tag_8021q.c err = br_vlan_get_info(dp->slave, vid, &vinfo); dp 160 net/dsa/tag_8021q.c err = dsa_port_vid_add(dp, vid, vinfo.flags); dp 166 net/dsa/tag_8021q.c return dsa_port_vid_add(dp->cpu_dp, vid, vinfo.flags); dp 65 net/dsa/tag_brcm.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 98 net/dsa/tag_brcm.c if (dp->index == 8) dp 100 net/dsa/tag_brcm.c brcm_tag[3] = (1 << dp->index) & BRCM_IG_DSTMAP1_MASK; dp 105 net/dsa/tag_brcm.c skb_set_queue_mapping(skb, BRCM_TAG_SET_PORT_QUEUE(dp->index, queue)); dp 17 net/dsa/tag_dsa.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 33 net/dsa/tag_dsa.c dsa_header[0] = 0x60 | dp->ds->index; dp 34 net/dsa/tag_dsa.c dsa_header[1] = dp->index << 3; dp 54 net/dsa/tag_dsa.c dsa_header[0] = 0x40 | dp->ds->index; dp 55 net/dsa/tag_dsa.c dsa_header[1] = dp->index << 3; dp 18 net/dsa/tag_edsa.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 42 net/dsa/tag_edsa.c edsa_header[4] = 0x60 | dp->ds->index; dp 43 net/dsa/tag_edsa.c edsa_header[5] = dp->index << 3; dp 67 net/dsa/tag_edsa.c edsa_header[4] = 0x40 | dp->ds->index; dp 68 net/dsa/tag_edsa.c edsa_header[5] = dp->index << 3; dp 62 net/dsa/tag_gswip.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 76 net/dsa/tag_gswip.c gswip_tag[3] = BIT(dp->index + GSWIP_TX_PORT_MAP_SHIFT) & GSWIP_TX_PORT_MAP_MASK; dp 94 net/dsa/tag_ksz.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 107 net/dsa/tag_ksz.c *tag = 1 << dp->index; dp 159 net/dsa/tag_ksz.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 172 net/dsa/tag_ksz.c *tag = BIT(dp->index); dp 214 net/dsa/tag_ksz.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 227 net/dsa/tag_ksz.c *tag = BIT(dp->index); dp 48 net/dsa/tag_lan9303.c static int lan9303_xmit_use_arl(struct dsa_port *dp, u8 *dest_addr) dp 50 net/dsa/tag_lan9303.c struct lan9303 *chip = dp->ds->priv; dp 57 net/dsa/tag_lan9303.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 77 net/dsa/tag_lan9303.c lan9303_tag[1] = lan9303_xmit_use_arl(dp, skb->data) ? 
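Editorial note: the net/dsa/tag_*.c entries around this point all follow the same transmit-side pattern, visible in the quoted tag_dsa.c, tag_brcm.c, tag_mtk.c and tag_trailer.c lines: the tagger resolves the sending port with dsa_slave_to_port(dev) and folds dp->ds->index and dp->index into the tag bytes. The short sketch below is a minimal, self-contained user-space illustration of that bit layout, not kernel code; mock_dsa_switch, mock_dsa_port and fill_dsa_style_tag are invented stand-ins, and only the byte values are taken from the tag_dsa.c lines quoted above.

    /*
     * Simplified illustration (assumption-labelled, user-space) of the DSA
     * tagging pattern shown in the listing: encode the switch index and the
     * port index of "dp" into a two-byte tag.
     */
    #include <stdint.h>
    #include <stdio.h>

    struct mock_dsa_switch { int index; };                         /* stand-in for struct dsa_switch */
    struct mock_dsa_port   { int index; struct mock_dsa_switch *ds; }; /* stand-in for struct dsa_port */

    /* Mirrors the tag_dsa.c lines quoted above:
     * dsa_header[0] = 0x60 | dp->ds->index;  dsa_header[1] = dp->index << 3;
     */
    static void fill_dsa_style_tag(const struct mock_dsa_port *dp, uint8_t tag[2])
    {
            tag[0] = 0x60 | (uint8_t)dp->ds->index;  /* tag code from the listing, OR'ed with switch id */
            tag[1] = (uint8_t)(dp->index << 3);      /* port number shifted into the upper bits */
    }

    int main(void)
    {
            struct mock_dsa_switch sw = { .index = 0 };
            struct mock_dsa_port dp = { .index = 2, .ds = &sw };
            uint8_t tag[2];

            fill_dsa_style_tag(&dp, tag);
            printf("tag bytes: %02x %02x\n", tag[0], tag[1]);  /* prints "60 10" for port 2 on switch 0 */
            return 0;
    }

The same idea recurs with different encodings in the other taggers listed here, for example 1 << dp->index in tag_brcm.c, tag_mtk.c and tag_trailer.c.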
dp 79 net/dsa/tag_lan9303.c dp->index | LAN9303_TAG_TX_STP_OVERRIDE; dp 23 net/dsa/tag_mtk.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 52 net/dsa/tag_mtk.c mtk_tag[1] = (1 << dp->index) & MTK_HDR_XMIT_DP_BIT_MASK; dp 33 net/dsa/tag_qca.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 46 net/dsa/tag_qca.c QCA_HDR_XMIT_FROM_CPU | BIT(dp->index); dp 89 net/dsa/tag_sja1105.c struct dsa_port *dp = dsa_slave_to_port(netdev); dp 90 net/dsa/tag_sja1105.c struct dsa_switch *ds = dp->ds; dp 91 net/dsa/tag_sja1105.c u16 tx_vid = dsa_8021q_tx_vid(ds, dp->index); dp 107 net/dsa/tag_sja1105.c if (dsa_port_is_vlan_filtering(dp)) dp 151 net/dsa/tag_sja1105.c struct dsa_port *dp; dp 153 net/dsa/tag_sja1105.c dp = dsa_slave_to_port(skb->dev); dp 154 net/dsa/tag_sja1105.c sp = dp->priv; dp 169 net/dsa/tag_sja1105.c dev_err_ratelimited(dp->ds->dev, dp 209 net/dsa/tag_sja1105.c dev_err_ratelimited(dp->ds->dev, dp 216 net/dsa/tag_sja1105.c dev_err_ratelimited(dp->ds->dev, dp 15 net/dsa/tag_trailer.c struct dsa_port *dp = dsa_slave_to_port(dev); dp 47 net/dsa/tag_trailer.c trailer[1] = 1 << dp->index; dp 1670 net/ipv4/ipconfig.c char *cp, *ip, *dp; dp 1720 net/ipv4/ipconfig.c if ((dp = strchr(ip, '.'))) { dp 1721 net/ipv4/ipconfig.c *dp++ = '\0'; dp 1722 net/ipv4/ipconfig.c strlcpy(utsname()->domainname, dp, dp 153 net/openvswitch/actions.c static int clone_execute(struct datapath *dp, struct sk_buff *skb, dp 159 net/openvswitch/actions.c static int do_execute_actions(struct datapath *dp, struct sk_buff *skb, dp 879 net/openvswitch/actions.c static void do_output(struct datapath *dp, struct sk_buff *skb, int out_port, dp 882 net/openvswitch/actions.c struct vport *vport = ovs_vport_rcu(dp, out_port); dp 899 net/openvswitch/actions.c struct net *net = read_pnet(&dp->net); dp 910 net/openvswitch/actions.c static int output_userspace(struct datapath *dp, struct sk_buff *skb, dp 938 net/openvswitch/actions.c vport = ovs_vport_rcu(dp, nla_get_u32(a)); dp 960 net/openvswitch/actions.c return ovs_dp_upcall(dp, skb, key, &upcall, cutlen); dp 967 net/openvswitch/actions.c static int sample(struct datapath *dp, struct sk_buff *skb, dp 990 net/openvswitch/actions.c return clone_execute(dp, skb, key, 0, actions, rem, last, dp 998 net/openvswitch/actions.c static int clone(struct datapath *dp, struct sk_buff *skb, dp 1012 net/openvswitch/actions.c return clone_execute(dp, skb, key, 0, actions, rem, last, dp 1126 net/openvswitch/actions.c static int execute_recirc(struct datapath *dp, struct sk_buff *skb, dp 1142 net/openvswitch/actions.c return clone_execute(dp, skb, key, recirc_id, NULL, 0, last, true); dp 1145 net/openvswitch/actions.c static int execute_check_pkt_len(struct datapath *dp, struct sk_buff *skb, dp 1175 net/openvswitch/actions.c return clone_execute(dp, skb, key, 0, nla_data(actions), dp 1180 net/openvswitch/actions.c static int do_execute_actions(struct datapath *dp, struct sk_buff *skb, dp 1201 net/openvswitch/actions.c do_output(dp, skb, port, key); dp 1209 net/openvswitch/actions.c do_output(dp, clone, port, key); dp 1223 net/openvswitch/actions.c output_userspace(dp, skb, key, a, attr, dp 1251 net/openvswitch/actions.c err = execute_recirc(dp, skb, key, a, last); dp 1274 net/openvswitch/actions.c err = sample(dp, skb, key, a, last); dp 1288 net/openvswitch/actions.c err = ovs_ct_execute(ovs_dp_get_net(dp), skb, key, dp 1325 net/openvswitch/actions.c if (ovs_meter_execute(dp, skb, key, nla_get_u32(a))) { dp 1334 net/openvswitch/actions.c err = clone(dp, skb, key, a, last); dp 1344 
net/openvswitch/actions.c err = execute_check_pkt_len(dp, skb, key, a, last); dp 1368 net/openvswitch/actions.c static int clone_execute(struct datapath *dp, struct sk_buff *skb, dp 1397 net/openvswitch/actions.c err = do_execute_actions(dp, skb, clone, dp 1425 net/openvswitch/actions.c ovs_dp_name(dp)); dp 1428 net/openvswitch/actions.c ovs_dp_name(dp)); dp 1435 net/openvswitch/actions.c static void process_deferred_actions(struct datapath *dp) dp 1452 net/openvswitch/actions.c do_execute_actions(dp, skb, key, actions, actions_len); dp 1462 net/openvswitch/actions.c int ovs_execute_actions(struct datapath *dp, struct sk_buff *skb, dp 1471 net/openvswitch/actions.c ovs_dp_name(dp)); dp 1478 net/openvswitch/actions.c err = do_execute_actions(dp, skb, key, dp 1482 net/openvswitch/actions.c process_deferred_actions(dp); dp 124 net/openvswitch/datapath.c static int queue_gso_packets(struct datapath *dp, struct sk_buff *, dp 128 net/openvswitch/datapath.c static int queue_userspace_packet(struct datapath *dp, struct sk_buff *, dp 134 net/openvswitch/datapath.c const char *ovs_dp_name(const struct datapath *dp) dp 136 net/openvswitch/datapath.c struct vport *vport = ovs_vport_ovsl_rcu(dp, OVSP_LOCAL); dp 140 net/openvswitch/datapath.c static int get_dpifindex(const struct datapath *dp) dp 147 net/openvswitch/datapath.c local = ovs_vport_rcu(dp, OVSP_LOCAL); dp 160 net/openvswitch/datapath.c struct datapath *dp = container_of(rcu, struct datapath, rcu); dp 162 net/openvswitch/datapath.c ovs_flow_tbl_destroy(&dp->table); dp 163 net/openvswitch/datapath.c free_percpu(dp->stats_percpu); dp 164 net/openvswitch/datapath.c kfree(dp->ports); dp 165 net/openvswitch/datapath.c ovs_meters_exit(dp); dp 166 net/openvswitch/datapath.c kfree(dp); dp 169 net/openvswitch/datapath.c static struct hlist_head *vport_hash_bucket(const struct datapath *dp, dp 172 net/openvswitch/datapath.c return &dp->ports[port_no & (DP_VPORT_HASH_BUCKETS - 1)]; dp 176 net/openvswitch/datapath.c struct vport *ovs_lookup_vport(const struct datapath *dp, u16 port_no) dp 181 net/openvswitch/datapath.c head = vport_hash_bucket(dp, port_no); dp 196 net/openvswitch/datapath.c struct datapath *dp = parms->dp; dp 197 net/openvswitch/datapath.c struct hlist_head *head = vport_hash_bucket(dp, vport->port_no); dp 219 net/openvswitch/datapath.c struct datapath *dp = p->dp; dp 227 net/openvswitch/datapath.c stats = this_cpu_ptr(dp->stats_percpu); dp 230 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_stats(&dp->table, key, &n_mask_hit); dp 238 net/openvswitch/datapath.c error = ovs_dp_upcall(dp, skb, key, &upcall, 0); dp 249 net/openvswitch/datapath.c error = ovs_execute_actions(dp, skb, sf_acts, key); dp 252 net/openvswitch/datapath.c ovs_dp_name(dp), error); dp 264 net/openvswitch/datapath.c int ovs_dp_upcall(struct datapath *dp, struct sk_buff *skb, dp 278 net/openvswitch/datapath.c err = queue_userspace_packet(dp, skb, key, upcall_info, cutlen); dp 280 net/openvswitch/datapath.c err = queue_gso_packets(dp, skb, key, upcall_info, cutlen); dp 287 net/openvswitch/datapath.c stats = this_cpu_ptr(dp->stats_percpu); dp 296 net/openvswitch/datapath.c static int queue_gso_packets(struct datapath *dp, struct sk_buff *skb, dp 328 net/openvswitch/datapath.c err = queue_userspace_packet(dp, skb, key, upcall_info, cutlen); dp 373 net/openvswitch/datapath.c static void pad_packet(struct datapath *dp, struct sk_buff *skb) dp 375 net/openvswitch/datapath.c if (!(dp->user_features & OVS_DP_F_UNALIGNED)) { dp 383 net/openvswitch/datapath.c static int 
queue_userspace_packet(struct datapath *dp, struct sk_buff *skb, dp 396 net/openvswitch/datapath.c dp_ifindex = get_dpifindex(dp); dp 426 net/openvswitch/datapath.c if (dp->user_features & OVS_DP_F_UNALIGNED) dp 493 net/openvswitch/datapath.c pad_packet(dp, user_skb); dp 503 net/openvswitch/datapath.c pad_packet(dp, user_skb); dp 519 net/openvswitch/datapath.c pad_packet(dp, user_skb); dp 523 net/openvswitch/datapath.c err = genlmsg_unicast(ovs_dp_get_net(dp), user_skb, upcall_info->portid); dp 542 net/openvswitch/datapath.c struct datapath *dp; dp 591 net/openvswitch/datapath.c dp = get_dp_rcu(net, ovs_header->dp_ifindex); dp 593 net/openvswitch/datapath.c if (!dp) dp 596 net/openvswitch/datapath.c input_vport = ovs_vport_rcu(dp, flow->key.phy.in_port); dp 598 net/openvswitch/datapath.c input_vport = ovs_vport_rcu(dp, OVSP_LOCAL); dp 608 net/openvswitch/datapath.c err = ovs_execute_actions(dp, packet, sf_acts, &flow->key); dp 654 net/openvswitch/datapath.c static void get_dp_stats(const struct datapath *dp, struct ovs_dp_stats *stats, dp 661 net/openvswitch/datapath.c stats->n_flows = ovs_flow_tbl_count(&dp->table); dp 662 net/openvswitch/datapath.c mega_stats->n_masks = ovs_flow_tbl_num_masks(&dp->table); dp 671 net/openvswitch/datapath.c percpu_stats = per_cpu_ptr(dp->stats_percpu, i); dp 904 net/openvswitch/datapath.c struct datapath *dp; dp 966 net/openvswitch/datapath.c dp = get_dp(net, ovs_header->dp_ifindex); dp 967 net/openvswitch/datapath.c if (unlikely(!dp)) { dp 974 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_ufid(&dp->table, &new_flow->id); dp 976 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup(&dp->table, &new_flow->key); dp 981 net/openvswitch/datapath.c error = ovs_flow_tbl_insert(&dp->table, new_flow, &mask); dp 1016 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_exact(&dp->table, dp 1143 net/openvswitch/datapath.c struct datapath *dp; dp 1175 net/openvswitch/datapath.c dp = get_dp(net, ovs_header->dp_ifindex); dp 1176 net/openvswitch/datapath.c if (unlikely(!dp)) { dp 1182 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_ufid(&dp->table, &sfid); dp 1184 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_exact(&dp->table, &match); dp 1245 net/openvswitch/datapath.c struct datapath *dp; dp 1267 net/openvswitch/datapath.c dp = get_dp(sock_net(skb->sk), ovs_header->dp_ifindex); dp 1268 net/openvswitch/datapath.c if (!dp) { dp 1274 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_ufid(&dp->table, &ufid); dp 1276 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_exact(&dp->table, &match); dp 1304 net/openvswitch/datapath.c struct datapath *dp; dp 1322 net/openvswitch/datapath.c dp = get_dp(sock_net(skb->sk), ovs_header->dp_ifindex); dp 1323 net/openvswitch/datapath.c if (unlikely(!dp)) { dp 1329 net/openvswitch/datapath.c err = ovs_flow_tbl_flush(&dp->table); dp 1334 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_ufid(&dp->table, &ufid); dp 1336 net/openvswitch/datapath.c flow = ovs_flow_tbl_lookup_exact(&dp->table, &match); dp 1342 net/openvswitch/datapath.c ovs_flow_tbl_remove(&dp->table, flow); dp 1380 net/openvswitch/datapath.c struct datapath *dp; dp 1391 net/openvswitch/datapath.c dp = get_dp_rcu(sock_net(skb->sk), ovs_header->dp_ifindex); dp 1392 net/openvswitch/datapath.c if (!dp) { dp 1397 net/openvswitch/datapath.c ti = rcu_dereference(dp->table.ti); dp 1483 net/openvswitch/datapath.c static int ovs_dp_cmd_fill_info(struct datapath *dp, struct sk_buff *skb, dp 1496 net/openvswitch/datapath.c ovs_header->dp_ifindex = 
get_dpifindex(dp); dp 1498 net/openvswitch/datapath.c err = nla_put_string(skb, OVS_DP_ATTR_NAME, ovs_dp_name(dp)); dp 1502 net/openvswitch/datapath.c get_dp_stats(dp, &dp_stats, &dp_megaflow_stats); dp 1512 net/openvswitch/datapath.c if (nla_put_u32(skb, OVS_DP_ATTR_USER_FEATURES, dp->user_features)) dp 1534 net/openvswitch/datapath.c struct datapath *dp; dp 1537 net/openvswitch/datapath.c dp = get_dp(net, ovs_header->dp_ifindex); dp 1542 net/openvswitch/datapath.c dp = vport && vport->port_no == OVSP_LOCAL ? vport->dp : NULL; dp 1544 net/openvswitch/datapath.c return dp ? dp : ERR_PTR(-ENODEV); dp 1549 net/openvswitch/datapath.c struct datapath *dp; dp 1551 net/openvswitch/datapath.c dp = lookup_datapath(sock_net(skb->sk), info->userhdr, info->attrs); dp 1552 net/openvswitch/datapath.c if (IS_ERR(dp)) dp 1555 net/openvswitch/datapath.c WARN(dp->user_features, "Dropping previously announced user features\n"); dp 1556 net/openvswitch/datapath.c dp->user_features = 0; dp 1561 net/openvswitch/datapath.c static int ovs_dp_change(struct datapath *dp, struct nlattr *a[]) dp 1579 net/openvswitch/datapath.c dp->user_features = user_features; dp 1581 net/openvswitch/datapath.c if (dp->user_features & OVS_DP_F_TC_RECIRC_SHARING) dp 1594 net/openvswitch/datapath.c struct datapath *dp; dp 1608 net/openvswitch/datapath.c dp = kzalloc(sizeof(*dp), GFP_KERNEL); dp 1609 net/openvswitch/datapath.c if (dp == NULL) dp 1612 net/openvswitch/datapath.c ovs_dp_set_net(dp, sock_net(skb->sk)); dp 1615 net/openvswitch/datapath.c err = ovs_flow_tbl_init(&dp->table); dp 1619 net/openvswitch/datapath.c dp->stats_percpu = netdev_alloc_pcpu_stats(struct dp_stats_percpu); dp 1620 net/openvswitch/datapath.c if (!dp->stats_percpu) { dp 1625 net/openvswitch/datapath.c dp->ports = kmalloc_array(DP_VPORT_HASH_BUCKETS, dp 1628 net/openvswitch/datapath.c if (!dp->ports) { dp 1634 net/openvswitch/datapath.c INIT_HLIST_HEAD(&dp->ports[i]); dp 1636 net/openvswitch/datapath.c err = ovs_meters_init(dp); dp 1644 net/openvswitch/datapath.c parms.dp = dp; dp 1648 net/openvswitch/datapath.c err = ovs_dp_change(dp, a); dp 1674 net/openvswitch/datapath.c err = ovs_dp_cmd_fill_info(dp, reply, info->snd_portid, dp 1678 net/openvswitch/datapath.c ovs_net = net_generic(ovs_dp_get_net(dp), ovs_net_id); dp 1679 net/openvswitch/datapath.c list_add_tail_rcu(&dp->list_node, &ovs_net->dps); dp 1687 net/openvswitch/datapath.c ovs_meters_exit(dp); dp 1689 net/openvswitch/datapath.c kfree(dp->ports); dp 1691 net/openvswitch/datapath.c free_percpu(dp->stats_percpu); dp 1693 net/openvswitch/datapath.c ovs_flow_tbl_destroy(&dp->table); dp 1695 net/openvswitch/datapath.c kfree(dp); dp 1703 net/openvswitch/datapath.c static void __dp_destroy(struct datapath *dp) dp 1711 net/openvswitch/datapath.c hlist_for_each_entry_safe(vport, n, &dp->ports[i], dp_hash_node) dp 1716 net/openvswitch/datapath.c list_del_rcu(&dp->list_node); dp 1721 net/openvswitch/datapath.c ovs_dp_detach_port(ovs_vport_ovsl(dp, OVSP_LOCAL)); dp 1724 net/openvswitch/datapath.c call_rcu(&dp->rcu, destroy_dp_rcu); dp 1730 net/openvswitch/datapath.c struct datapath *dp; dp 1738 net/openvswitch/datapath.c dp = lookup_datapath(sock_net(skb->sk), info->userhdr, info->attrs); dp 1739 net/openvswitch/datapath.c err = PTR_ERR(dp); dp 1740 net/openvswitch/datapath.c if (IS_ERR(dp)) dp 1743 net/openvswitch/datapath.c err = ovs_dp_cmd_fill_info(dp, reply, info->snd_portid, dp 1747 net/openvswitch/datapath.c __dp_destroy(dp); dp 1763 net/openvswitch/datapath.c struct datapath *dp; dp 1771 
net/openvswitch/datapath.c dp = lookup_datapath(sock_net(skb->sk), info->userhdr, info->attrs); dp 1772 net/openvswitch/datapath.c err = PTR_ERR(dp); dp 1773 net/openvswitch/datapath.c if (IS_ERR(dp)) dp 1776 net/openvswitch/datapath.c err = ovs_dp_change(dp, info->attrs); dp 1780 net/openvswitch/datapath.c err = ovs_dp_cmd_fill_info(dp, reply, info->snd_portid, dp 1798 net/openvswitch/datapath.c struct datapath *dp; dp 1806 net/openvswitch/datapath.c dp = lookup_datapath(sock_net(skb->sk), info->userhdr, info->attrs); dp 1807 net/openvswitch/datapath.c if (IS_ERR(dp)) { dp 1808 net/openvswitch/datapath.c err = PTR_ERR(dp); dp 1811 net/openvswitch/datapath.c err = ovs_dp_cmd_fill_info(dp, reply, info->snd_portid, dp 1827 net/openvswitch/datapath.c struct datapath *dp; dp 1832 net/openvswitch/datapath.c list_for_each_entry(dp, &ovs_net->dps, list_node) { dp 1834 net/openvswitch/datapath.c ovs_dp_cmd_fill_info(dp, skb, NETLINK_CB(cb->skb).portid, dp 1906 net/openvswitch/datapath.c ovs_header->dp_ifindex = get_dpifindex(vport->dp); dp 1973 net/openvswitch/datapath.c struct datapath *dp; dp 1983 net/openvswitch/datapath.c ovs_header->dp_ifindex != get_dpifindex(vport->dp)) dp 1992 net/openvswitch/datapath.c dp = get_dp(net, ovs_header->dp_ifindex); dp 1993 net/openvswitch/datapath.c if (!dp) dp 1996 net/openvswitch/datapath.c vport = ovs_vport_ovsl_rcu(dp, port_no); dp 2005 net/openvswitch/datapath.c static unsigned int ovs_get_max_headroom(struct datapath *dp) dp 2013 net/openvswitch/datapath.c hlist_for_each_entry_rcu(vport, &dp->ports[i], dp_hash_node) { dp 2025 net/openvswitch/datapath.c static void ovs_update_headroom(struct datapath *dp, unsigned int new_headroom) dp 2030 net/openvswitch/datapath.c dp->max_headroom = new_headroom; dp 2032 net/openvswitch/datapath.c hlist_for_each_entry_rcu(vport, &dp->ports[i], dp_hash_node) dp 2043 net/openvswitch/datapath.c struct datapath *dp; dp 2065 net/openvswitch/datapath.c dp = get_dp(sock_net(skb->sk), ovs_header->dp_ifindex); dp 2067 net/openvswitch/datapath.c if (!dp) dp 2071 net/openvswitch/datapath.c vport = ovs_vport_ovsl(dp, port_no); dp 2081 net/openvswitch/datapath.c vport = ovs_vport_ovsl(dp, port_no); dp 2090 net/openvswitch/datapath.c parms.dp = dp; dp 2108 net/openvswitch/datapath.c if (new_headroom > dp->max_headroom) dp 2109 net/openvswitch/datapath.c ovs_update_headroom(dp, new_headroom); dp 2111 net/openvswitch/datapath.c netdev_set_rx_headroom(vport->dev, dp->max_headroom); dp 2183 net/openvswitch/datapath.c struct datapath *dp; dp 2209 net/openvswitch/datapath.c dp = vport->dp; dp 2210 net/openvswitch/datapath.c if (netdev_get_fwd_headroom(vport->dev) == dp->max_headroom) dp 2217 net/openvswitch/datapath.c new_headroom = ovs_get_max_headroom(dp); dp 2219 net/openvswitch/datapath.c if (new_headroom < dp->max_headroom) dp 2220 net/openvswitch/datapath.c ovs_update_headroom(dp, new_headroom); dp 2267 net/openvswitch/datapath.c struct datapath *dp; dp 2272 net/openvswitch/datapath.c dp = get_dp_rcu(sock_net(skb->sk), ovs_header->dp_ifindex); dp 2273 net/openvswitch/datapath.c if (!dp) { dp 2281 net/openvswitch/datapath.c hlist_for_each_entry_rcu(vport, &dp->ports[i], dp_hash_node) { dp 2406 net/openvswitch/datapath.c struct datapath *dp; dp 2408 net/openvswitch/datapath.c list_for_each_entry(dp, &ovs_net->dps, list_node) { dp 2414 net/openvswitch/datapath.c hlist_for_each_entry(vport, &dp->ports[i], dp_hash_node) { dp 2427 net/openvswitch/datapath.c struct datapath *dp, *dp_next; dp 2437 net/openvswitch/datapath.c 
list_for_each_entry_safe(dp, dp_next, &ovs_net->dps, list_node) dp 2438 net/openvswitch/datapath.c __dp_destroy(dp); dp 158 net/openvswitch/datapath.h static inline struct net *ovs_dp_get_net(const struct datapath *dp) dp 160 net/openvswitch/datapath.h return read_pnet(&dp->net); dp 163 net/openvswitch/datapath.h static inline void ovs_dp_set_net(struct datapath *dp, struct net *net) dp 165 net/openvswitch/datapath.h write_pnet(&dp->net, net); dp 168 net/openvswitch/datapath.h struct vport *ovs_lookup_vport(const struct datapath *dp, u16 port_no); dp 170 net/openvswitch/datapath.h static inline struct vport *ovs_vport_rcu(const struct datapath *dp, int port_no) dp 173 net/openvswitch/datapath.h return ovs_lookup_vport(dp, port_no); dp 176 net/openvswitch/datapath.h static inline struct vport *ovs_vport_ovsl_rcu(const struct datapath *dp, int port_no) dp 179 net/openvswitch/datapath.h return ovs_lookup_vport(dp, port_no); dp 182 net/openvswitch/datapath.h static inline struct vport *ovs_vport_ovsl(const struct datapath *dp, int port_no) dp 185 net/openvswitch/datapath.h return ovs_lookup_vport(dp, port_no); dp 197 net/openvswitch/datapath.h return vport->dp; dp 208 net/openvswitch/datapath.h struct datapath *dp; dp 212 net/openvswitch/datapath.h dp = get_dp_rcu(net, dp_ifindex); dp 215 net/openvswitch/datapath.h return dp; dp 229 net/openvswitch/datapath.h const char *ovs_dp_name(const struct datapath *dp); dp 233 net/openvswitch/datapath.h int ovs_execute_actions(struct datapath *dp, struct sk_buff *skb, dp 17 net/openvswitch/dp_notify.c struct datapath *dp; dp 19 net/openvswitch/dp_notify.c dp = vport->dp; dp 20 net/openvswitch/dp_notify.c notify = ovs_vport_cmd_build_info(vport, ovs_dp_get_net(dp), dp 24 net/openvswitch/dp_notify.c genl_set_err(&dp_vport_genl_family, ovs_dp_get_net(dp), 0, dp 30 net/openvswitch/dp_notify.c ovs_dp_get_net(dp), notify, 0, dp 37 net/openvswitch/dp_notify.c struct datapath *dp; dp 40 net/openvswitch/dp_notify.c list_for_each_entry(dp, &ovs_net->dps, list_node) { dp 47 net/openvswitch/dp_notify.c hlist_for_each_entry_safe(vport, n, &dp->ports[i], dp_hash_node) { dp 50 net/openvswitch/meter.c static struct hlist_head *meter_hash_bucket(const struct datapath *dp, dp 53 net/openvswitch/meter.c return &dp->meters[meter_id & (METER_HASH_BUCKETS - 1)]; dp 57 net/openvswitch/meter.c static struct dp_meter *lookup_meter(const struct datapath *dp, dp 63 net/openvswitch/meter.c head = meter_hash_bucket(dp, meter_id); dp 71 net/openvswitch/meter.c static void attach_meter(struct datapath *dp, struct dp_meter *meter) dp 73 net/openvswitch/meter.c struct hlist_head *head = meter_hash_bucket(dp, meter->id); dp 275 net/openvswitch/meter.c struct datapath *dp; dp 296 net/openvswitch/meter.c dp = get_dp(sock_net(skb->sk), ovs_header->dp_ifindex); dp 297 net/openvswitch/meter.c if (!dp) { dp 305 net/openvswitch/meter.c old_meter = lookup_meter(dp, meter_id); dp 307 net/openvswitch/meter.c attach_meter(dp, meter); dp 343 net/openvswitch/meter.c struct datapath *dp; dp 360 net/openvswitch/meter.c dp = get_dp(sock_net(skb->sk), ovs_header->dp_ifindex); dp 361 net/openvswitch/meter.c if (!dp) { dp 367 net/openvswitch/meter.c meter = lookup_meter(dp, meter_id); dp 396 net/openvswitch/meter.c struct datapath *dp; dp 412 net/openvswitch/meter.c dp = get_dp(sock_net(skb->sk), ovs_header->dp_ifindex); dp 413 net/openvswitch/meter.c if (!dp) { dp 418 net/openvswitch/meter.c old_meter = lookup_meter(dp, meter_id); dp 442 net/openvswitch/meter.c bool ovs_meter_execute(struct datapath *dp, 
struct sk_buff *skb, dp 454 net/openvswitch/meter.c meter = lookup_meter(dp, meter_id); dp 570 net/openvswitch/meter.c int ovs_meters_init(struct datapath *dp) dp 574 net/openvswitch/meter.c dp->meters = kmalloc_array(METER_HASH_BUCKETS, dp 577 net/openvswitch/meter.c if (!dp->meters) dp 581 net/openvswitch/meter.c INIT_HLIST_HEAD(&dp->meters[i]); dp 586 net/openvswitch/meter.c void ovs_meters_exit(struct datapath *dp) dp 591 net/openvswitch/meter.c struct hlist_head *head = &dp->meters[i]; dp 599 net/openvswitch/meter.c kfree(dp->meters); dp 46 net/openvswitch/meter.h int ovs_meters_init(struct datapath *dp); dp 47 net/openvswitch/meter.h void ovs_meters_exit(struct datapath *dp); dp 48 net/openvswitch/meter.h bool ovs_meter_execute(struct datapath *dp, struct sk_buff *skb, dp 53 net/openvswitch/vport-geneve.c struct net *net = ovs_dp_get_net(parms->dp); dp 41 net/openvswitch/vport-gre.c struct net *net = ovs_dp_get_net(parms->dp); dp 182 net/openvswitch/vport-internal_dev.c dev_net_set(vport->dev, ovs_dp_get_net(vport->dp)); dp 69 net/openvswitch/vport-netdev.c static struct net_device *get_dpdev(const struct datapath *dp) dp 73 net/openvswitch/vport-netdev.c local = ovs_vport_ovsl(dp, OVSP_LOCAL); dp 81 net/openvswitch/vport-netdev.c vport->dev = dev_get_by_name(ovs_dp_get_net(vport->dp), name); dp 97 net/openvswitch/vport-netdev.c get_dpdev(vport->dp), dp 115 net/openvswitch/vport-netdev.c netdev_upper_dev_unlink(vport->dev, get_dpdev(vport->dp)); dp 73 net/openvswitch/vport-vxlan.c struct net *net = ovs_dp_get_net(parms->dp); dp 101 net/openvswitch/vport.c net_eq(ovs_dp_get_net(vport->dp), net)) dp 134 net/openvswitch/vport.c vport->dp = parms->dp; dp 205 net/openvswitch/vport.c bucket = hash_bucket(ovs_dp_get_net(vport->dp), dp 380 net/openvswitch/vport.c if (vport->dp->user_features & OVS_DP_F_VPORT_PIDS) dp 433 net/openvswitch/vport.c if (unlikely(dev_net(skb->dev) != ovs_dp_get_net(vport->dp))) { dp 72 net/openvswitch/vport.h struct datapath *dp; dp 100 net/openvswitch/vport.h struct datapath *dp; dp 101 net/rds/ib_cm.c const union rds_ib_conn_priv *dp = NULL; dp 109 net/rds/ib_cm.c dp = event->param.conn.private_data; dp 113 net/rds/ib_cm.c major = dp->ricp_v6.dp_protocol_major; dp 114 net/rds/ib_cm.c minor = dp->ricp_v6.dp_protocol_minor; dp 115 net/rds/ib_cm.c credit = dp->ricp_v6.dp_credit; dp 121 net/rds/ib_cm.c ack_seq = get_unaligned(&dp->ricp_v6.dp_ack_seq); dp 125 net/rds/ib_cm.c major = dp->ricp_v4.dp_protocol_major; dp 126 net/rds/ib_cm.c minor = dp->ricp_v4.dp_protocol_minor; dp 127 net/rds/ib_cm.c credit = dp->ricp_v4.dp_credit; dp 128 net/rds/ib_cm.c ack_seq = get_unaligned(&dp->ricp_v4.dp_ack_seq); dp 186 net/rds/ib_cm.c if (dp) { dp 198 net/rds/ib_cm.c union rds_ib_conn_priv *dp, dp 216 net/rds/ib_cm.c if (dp) { dp 217 net/rds/ib_cm.c memset(dp, 0, sizeof(*dp)); dp 219 net/rds/ib_cm.c dp->ricp_v6.dp_saddr = conn->c_laddr; dp 220 net/rds/ib_cm.c dp->ricp_v6.dp_daddr = conn->c_faddr; dp 221 net/rds/ib_cm.c dp->ricp_v6.dp_protocol_major = dp 223 net/rds/ib_cm.c dp->ricp_v6.dp_protocol_minor = dp 225 net/rds/ib_cm.c dp->ricp_v6.dp_protocol_minor_mask = dp 227 net/rds/ib_cm.c dp->ricp_v6.dp_ack_seq = dp 229 net/rds/ib_cm.c dp->ricp_v6.dp_cmn.ricpc_dp_toss = conn->c_tos; dp 231 net/rds/ib_cm.c conn_param->private_data = &dp->ricp_v6; dp 232 net/rds/ib_cm.c conn_param->private_data_len = sizeof(dp->ricp_v6); dp 234 net/rds/ib_cm.c dp->ricp_v4.dp_saddr = conn->c_laddr.s6_addr32[3]; dp 235 net/rds/ib_cm.c dp->ricp_v4.dp_daddr = conn->c_faddr.s6_addr32[3]; dp 236 
net/rds/ib_cm.c dp->ricp_v4.dp_protocol_major = dp 238 net/rds/ib_cm.c dp->ricp_v4.dp_protocol_minor = dp 240 net/rds/ib_cm.c dp->ricp_v4.dp_protocol_minor_mask = dp 242 net/rds/ib_cm.c dp->ricp_v4.dp_ack_seq = dp 244 net/rds/ib_cm.c dp->ricp_v4.dp_cmn.ricpc_dp_toss = conn->c_tos; dp 246 net/rds/ib_cm.c conn_param->private_data = &dp->ricp_v4; dp 247 net/rds/ib_cm.c conn_param->private_data_len = sizeof(dp->ricp_v4); dp 257 net/rds/ib_cm.c dp->ricp_v6.dp_credit = cpu_to_be32(credits); dp 259 net/rds/ib_cm.c dp->ricp_v4.dp_credit = cpu_to_be32(credits); dp 634 net/rds/ib_cm.c const union rds_ib_conn_priv *dp = event->param.conn.private_data; dp 659 net/rds/ib_cm.c major = dp->ricp_v6.dp_protocol_major; dp 660 net/rds/ib_cm.c minor = dp->ricp_v6.dp_protocol_minor; dp 661 net/rds/ib_cm.c mask = dp->ricp_v6.dp_protocol_minor_mask; dp 664 net/rds/ib_cm.c major = dp->ricp_v4.dp_protocol_major; dp 665 net/rds/ib_cm.c minor = dp->ricp_v4.dp_protocol_minor; dp 666 net/rds/ib_cm.c mask = dp->ricp_v4.dp_protocol_minor_mask; dp 684 net/rds/ib_cm.c &dp->ricp_v6.dp_saddr, major, minor); dp 687 net/rds/ib_cm.c &dp->ricp_v4.dp_saddr, major, minor); dp 730 net/rds/ib_cm.c const union rds_ib_conn_priv *dp; dp 748 net/rds/ib_cm.c dp = event->param.conn.private_data; dp 751 net/rds/ib_cm.c dp_cmn = &dp->ricp_v6.dp_cmn; dp 752 net/rds/ib_cm.c saddr6 = &dp->ricp_v6.dp_saddr; dp 753 net/rds/ib_cm.c daddr6 = &dp->ricp_v6.dp_daddr; dp 780 net/rds/ib_cm.c dp_cmn = &dp->ricp_v4.dp_cmn; dp 781 net/rds/ib_cm.c ipv6_addr_set_v4mapped(dp->ricp_v4.dp_saddr, &s_mapped_addr); dp 782 net/rds/ib_cm.c ipv6_addr_set_v4mapped(dp->ricp_v4.dp_daddr, &d_mapped_addr); dp 873 net/rds/ib_cm.c union rds_ib_conn_priv dp; dp 887 net/rds/ib_cm.c rds_ib_cm_fill_conn_param(conn, &conn_param, &dp, dp 170 net/sched/sch_gred.c u16 dp = tc_index_to_dp(skb); dp 172 net/sched/sch_gred.c if (dp >= t->DPs || (q = t->tab[dp]) == NULL) { dp 173 net/sched/sch_gred.c dp = t->def; dp 175 net/sched/sch_gred.c q = t->tab[dp]; dp 190 net/sched/sch_gred.c skb->tc_index = (skb->tc_index & ~GRED_VQ_MASK) | dp; dp 269 net/sched/sch_gred.c u16 dp = tc_index_to_dp(skb); dp 271 net/sched/sch_gred.c if (dp >= t->DPs || (q = t->tab[dp]) == NULL) { dp 474 net/sched/sch_gred.c static inline int gred_change_vq(struct Qdisc *sch, int dp, dp 481 net/sched/sch_gred.c struct gred_sched_data *q = table->tab[dp]; dp 489 net/sched/sch_gred.c table->tab[dp] = q = *prealloc; dp 496 net/sched/sch_gred.c q->DP = dp; dp 534 net/sched/sch_gred.c u32 dp; dp 539 net/sched/sch_gred.c dp = nla_get_u32(tb[TCA_GRED_VQ_DP]); dp 542 net/sched/sch_gred.c table->tab[dp]->red_flags = nla_get_u32(tb[TCA_GRED_VQ_FLAGS]); dp 565 net/sched/sch_gred.c u32 dp; dp 576 net/sched/sch_gred.c dp = nla_get_u32(tb[TCA_GRED_VQ_DP]); dp 577 net/sched/sch_gred.c if (dp >= table->DPs) { dp 581 net/sched/sch_gred.c if (dp != cdp && !table->tab[dp]) { dp 727 net/sctp/sm_make_chunk.c struct sctp_datahdr dp; dp 732 net/sctp/sm_make_chunk.c memset(&dp, 0, sizeof(dp)); dp 733 net/sctp/sm_make_chunk.c dp.ppid = sinfo->sinfo_ppid; dp 734 net/sctp/sm_make_chunk.c dp.stream = htons(sinfo->sinfo_stream); dp 740 net/sctp/sm_make_chunk.c retval = sctp_make_data(asoc, flags, sizeof(dp) + len, gfp); dp 744 net/sctp/sm_make_chunk.c retval->subh.data_hdr = sctp_addto_chunk(retval, sizeof(dp), &dp); dp 30 net/sctp/stream_interleave.c struct sctp_idatahdr dp; dp 32 net/sctp/stream_interleave.c memset(&dp, 0, sizeof(dp)); dp 33 net/sctp/stream_interleave.c dp.stream = htons(sinfo->sinfo_stream); dp 38 
net/sctp/stream_interleave.c retval = sctp_make_idata(asoc, flags, sizeof(dp) + len, gfp); dp 42 net/sctp/stream_interleave.c retval->subh.idata_hdr = sctp_addto_chunk(retval, sizeof(dp), &dp); dp 266 samples/mic/mpssd/mpssd.c static void set_dp(struct mic_info *mic, int type, void *dp) dp 270 samples/mic/mpssd/mpssd.c mic->mic_console.console_dp = dp; dp 273 samples/mic/mpssd/mpssd.c mic->mic_net.net_dp = dp; dp 276 samples/mic/mpssd/mpssd.c mic->mic_virtblk.block_dp = dp; dp 302 samples/mic/mpssd/mpssd.c void *dp = get_dp(mic, type); dp 306 samples/mic/mpssd/mpssd.c d = dp + i; dp 1730 samples/mic/mpssd/mpssd.c DIR *dp; dp 1733 samples/mic/mpssd/mpssd.c dp = opendir(MICSYSFSDIR); dp 1734 samples/mic/mpssd/mpssd.c if (!dp) dp 1737 samples/mic/mpssd/mpssd.c while ((file = readdir(dp)) != NULL) { dp 1753 samples/mic/mpssd/mpssd.c closedir(dp); dp 280 security/smack/smack_lsm.c struct dentry *dp) dp 293 security/smack/smack_lsm.c rc = __vfs_getxattr(dp, ip, name, buffer, SMK_LONGLABEL); dp 3266 security/smack/smack_lsm.c struct dentry *dp; dp 3404 security/smack/smack_lsm.c dp = dget(opt_dentry); dp 3405 security/smack/smack_lsm.c skp = smk_fetch(XATTR_NAME_SMACK, inode, dp); dp 3424 security/smack/smack_lsm.c rc = __vfs_setxattr(dp, inode, dp 3429 security/smack/smack_lsm.c rc = __vfs_getxattr(dp, inode, dp 3442 security/smack/smack_lsm.c skp = smk_fetch(XATTR_NAME_SMACKEXEC, inode, dp); dp 3448 security/smack/smack_lsm.c skp = smk_fetch(XATTR_NAME_SMACKMMAP, inode, dp); dp 3454 security/smack/smack_lsm.c dput(dp); dp 407 security/tomoyo/util.c unsigned char *dp = buffer; dp 414 security/tomoyo/util.c *dp++ = ' '; dp 417 security/tomoyo/util.c *dp++ = *sp++; dp 421 security/tomoyo/util.c *dp = '\0'; dp 134 sound/core/seq/oss/seq_oss.c struct seq_oss_devinfo *dp; dp 136 sound/core/seq/oss/seq_oss.c if ((dp = file->private_data) == NULL) dp 140 sound/core/seq/oss/seq_oss.c snd_seq_oss_release(dp); dp 149 sound/core/seq/oss/seq_oss.c struct seq_oss_devinfo *dp; dp 150 sound/core/seq/oss/seq_oss.c dp = file->private_data; dp 151 sound/core/seq/oss/seq_oss.c if (snd_BUG_ON(!dp)) dp 153 sound/core/seq/oss/seq_oss.c return snd_seq_oss_read(dp, buf, count); dp 160 sound/core/seq/oss/seq_oss.c struct seq_oss_devinfo *dp; dp 161 sound/core/seq/oss/seq_oss.c dp = file->private_data; dp 162 sound/core/seq/oss/seq_oss.c if (snd_BUG_ON(!dp)) dp 164 sound/core/seq/oss/seq_oss.c return snd_seq_oss_write(dp, buf, count, file); dp 170 sound/core/seq/oss/seq_oss.c struct seq_oss_devinfo *dp; dp 171 sound/core/seq/oss/seq_oss.c dp = file->private_data; dp 172 sound/core/seq/oss/seq_oss.c if (snd_BUG_ON(!dp)) dp 174 sound/core/seq/oss/seq_oss.c return snd_seq_oss_ioctl(dp, cmd, arg); dp 190 sound/core/seq/oss/seq_oss.c struct seq_oss_devinfo *dp; dp 191 sound/core/seq/oss/seq_oss.c dp = file->private_data; dp 192 sound/core/seq/oss/seq_oss.c if (snd_BUG_ON(!dp)) dp 194 sound/core/seq/oss/seq_oss.c return snd_seq_oss_poll(dp, file, wait); dp 111 sound/core/seq/oss/seq_oss_device.h void snd_seq_oss_release(struct seq_oss_devinfo *dp); dp 112 sound/core/seq/oss/seq_oss_device.h int snd_seq_oss_ioctl(struct seq_oss_devinfo *dp, unsigned int cmd, unsigned long arg); dp 114 sound/core/seq/oss/seq_oss_device.h int snd_seq_oss_write(struct seq_oss_devinfo *dp, const char __user *buf, int count, struct file *opt); dp 115 sound/core/seq/oss/seq_oss_device.h __poll_t snd_seq_oss_poll(struct seq_oss_devinfo *dp, struct file *file, poll_table * wait); dp 117 sound/core/seq/oss/seq_oss_device.h void snd_seq_oss_reset(struct 
seq_oss_devinfo *dp); dp 120 sound/core/seq/oss/seq_oss_device.h void snd_seq_oss_process_queue(struct seq_oss_devinfo *dp, abstime_t time); dp 136 sound/core/seq/oss/seq_oss_device.h snd_seq_oss_dispatch(struct seq_oss_devinfo *dp, struct snd_seq_event *ev, int atomic, int hop) dp 138 sound/core/seq/oss/seq_oss_device.h return snd_seq_kernel_client_dispatch(dp->cseq, ev, atomic, hop); dp 143 sound/core/seq/oss/seq_oss_device.h snd_seq_oss_control(struct seq_oss_devinfo *dp, unsigned int type, void *arg) dp 147 sound/core/seq/oss/seq_oss_device.h snd_seq_client_ioctl_lock(dp->cseq); dp 148 sound/core/seq/oss/seq_oss_device.h err = snd_seq_kernel_client_ctl(dp->cseq, type, arg); dp 149 sound/core/seq/oss/seq_oss_device.h snd_seq_client_ioctl_unlock(dp->cseq); dp 155 sound/core/seq/oss/seq_oss_device.h snd_seq_oss_fill_addr(struct seq_oss_devinfo *dp, struct snd_seq_event *ev, dp 158 sound/core/seq/oss/seq_oss_device.h ev->queue = dp->queue; dp 159 sound/core/seq/oss/seq_oss_device.h ev->source = dp->addr; dp 22 sound/core/seq/oss/seq_oss_event.c static int extended_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev); dp 23 sound/core/seq/oss/seq_oss_event.c static int chn_voice_event(struct seq_oss_devinfo *dp, union evrec *event_rec, struct snd_seq_event *ev); dp 24 sound/core/seq/oss/seq_oss_event.c static int chn_common_event(struct seq_oss_devinfo *dp, union evrec *event_rec, struct snd_seq_event *ev); dp 25 sound/core/seq/oss/seq_oss_event.c static int timing_event(struct seq_oss_devinfo *dp, union evrec *event_rec, struct snd_seq_event *ev); dp 26 sound/core/seq/oss/seq_oss_event.c static int local_event(struct seq_oss_devinfo *dp, union evrec *event_rec, struct snd_seq_event *ev); dp 27 sound/core/seq/oss/seq_oss_event.c static int old_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev); dp 28 sound/core/seq/oss/seq_oss_event.c static int note_on_event(struct seq_oss_devinfo *dp, int dev, int ch, int note, int vel, struct snd_seq_event *ev); dp 29 sound/core/seq/oss/seq_oss_event.c static int note_off_event(struct seq_oss_devinfo *dp, int dev, int ch, int note, int vel, struct snd_seq_event *ev); dp 30 sound/core/seq/oss/seq_oss_event.c static int set_note_event(struct seq_oss_devinfo *dp, int dev, int type, int ch, int note, int vel, struct snd_seq_event *ev); dp 31 sound/core/seq/oss/seq_oss_event.c static int set_control_event(struct seq_oss_devinfo *dp, int dev, int type, int ch, int param, int val, struct snd_seq_event *ev); dp 32 sound/core/seq/oss/seq_oss_event.c static int set_echo_event(struct seq_oss_devinfo *dp, union evrec *rec, struct snd_seq_event *ev); dp 42 sound/core/seq/oss/seq_oss_event.c snd_seq_oss_process_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 46 sound/core/seq/oss/seq_oss_event.c return extended_event(dp, q, ev); dp 49 sound/core/seq/oss/seq_oss_event.c return chn_voice_event(dp, q, ev); dp 52 sound/core/seq/oss/seq_oss_event.c return chn_common_event(dp, q, ev); dp 55 sound/core/seq/oss/seq_oss_event.c return timing_event(dp, q, ev); dp 58 sound/core/seq/oss/seq_oss_event.c return local_event(dp, q, ev); dp 61 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_synth_sysex(dp, q->x.dev, q->x.buf, ev); dp 64 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) dp 67 sound/core/seq/oss/seq_oss_event.c if (! 
is_write_mode(dp->file_mode)) dp 69 sound/core/seq/oss/seq_oss_event.c if (snd_seq_oss_midi_open(dp, q->s.dev, SNDRV_SEQ_OSS_FILE_WRITE)) dp 71 sound/core/seq/oss/seq_oss_event.c if (snd_seq_oss_midi_filemode(dp, q->s.dev) & SNDRV_SEQ_OSS_FILE_WRITE) dp 72 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_midi_putc(dp, q->s.dev, q->s.parm1, ev); dp 76 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) dp 78 sound/core/seq/oss/seq_oss_event.c return set_echo_event(dp, q, ev); dp 81 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) dp 83 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_synth_raw_event(dp, q->c[1], q->c, ev); dp 86 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) dp 88 sound/core/seq/oss/seq_oss_event.c return old_event(dp, q, ev); dp 95 sound/core/seq/oss/seq_oss_event.c old_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 99 sound/core/seq/oss/seq_oss_event.c return note_off_event(dp, 0, q->n.chn, q->n.note, q->n.vel, ev); dp 102 sound/core/seq/oss/seq_oss_event.c return note_on_event(dp, 0, q->n.chn, q->n.note, q->n.vel, ev); dp 109 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, 0, SNDRV_SEQ_EVENT_PGMCHANGE, dp 113 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_timer_reset(dp->timer); dp 121 sound/core/seq/oss/seq_oss_event.c extended_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 127 sound/core/seq/oss/seq_oss_event.c return note_off_event(dp, q->e.dev, q->e.chn, q->e.p1, q->e.p2, ev); dp 130 sound/core/seq/oss/seq_oss_event.c return note_on_event(dp, q->e.dev, q->e.chn, q->e.p1, q->e.p2, ev); dp 133 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->e.dev, SNDRV_SEQ_EVENT_PGMCHANGE, dp 137 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->e.dev, SNDRV_SEQ_EVENT_CHANPRESS, dp 144 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->e.dev, SNDRV_SEQ_EVENT_CONTROLLER, dp 152 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->e.dev, dp 157 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->e.dev, dp 161 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->e.dev, dp 167 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_synth_raw_event(dp, q->e.dev, q->c, ev); dp 175 sound/core/seq/oss/seq_oss_event.c chn_voice_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 181 sound/core/seq/oss/seq_oss_event.c return note_on_event(dp, q->v.dev, q->v.chn, q->v.note, q->v.parm, ev); dp 184 sound/core/seq/oss/seq_oss_event.c return note_off_event(dp, q->v.dev, q->v.chn, q->v.note, q->v.parm, ev); dp 187 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, q->v.dev, SNDRV_SEQ_EVENT_KEYPRESS, dp 196 sound/core/seq/oss/seq_oss_event.c chn_common_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 202 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->l.dev, SNDRV_SEQ_EVENT_PGMCHANGE, dp 206 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->l.dev, SNDRV_SEQ_EVENT_CONTROLLER, dp 211 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->l.dev, SNDRV_SEQ_EVENT_PITCHBEND, dp 215 sound/core/seq/oss/seq_oss_event.c return set_control_event(dp, q->l.dev, SNDRV_SEQ_EVENT_CHANPRESS, dp 223 sound/core/seq/oss/seq_oss_event.c timing_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 227 
sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) dp 228 sound/core/seq/oss/seq_oss_event.c return set_echo_event(dp, q, ev); dp 234 sound/core/seq/oss/seq_oss_event.c return set_echo_event(dp, &tmp, ev); dp 238 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode) dp 239 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_timer_stop(dp->timer); dp 243 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode) dp 244 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_timer_continue(dp->timer); dp 248 sound/core/seq/oss/seq_oss_event.c if (dp->seq_mode) dp 249 sound/core/seq/oss/seq_oss_event.c return snd_seq_oss_timer_tempo(dp->timer, q->t.time); dp 258 sound/core/seq/oss/seq_oss_event.c local_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev) dp 274 sound/core/seq/oss/seq_oss_event.c note_on_event(struct seq_oss_devinfo *dp, int dev, int ch, int note, int vel, struct snd_seq_event *ev) dp 278 sound/core/seq/oss/seq_oss_event.c info = snd_seq_oss_synth_info(dp, dev); dp 286 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEON, ch, note, vel, ev); dp 304 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, type, ch, info->ch[ch].note, vel, ev); dp 310 sound/core/seq/oss/seq_oss_event.c set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEOFF, ch, info->ch[ch].note, 0, ev); dp 315 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEON, ch, note, vel, ev); dp 320 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEON, ch, note, vel, ev); dp 324 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_KEYPRESS, ch, note - 128, vel, ev); dp 326 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEON, ch, note, vel, ev); dp 335 sound/core/seq/oss/seq_oss_event.c note_off_event(struct seq_oss_devinfo *dp, int dev, int ch, int note, int vel, struct snd_seq_event *ev) dp 339 sound/core/seq/oss/seq_oss_event.c info = snd_seq_oss_synth_info(dp, dev); dp 347 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEON, ch, note, vel, ev); dp 355 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEOFF, ch, note, vel, ev); dp 362 sound/core/seq/oss/seq_oss_event.c return set_note_event(dp, dev, SNDRV_SEQ_EVENT_NOTEOFF, ch, note, vel, ev); dp 372 sound/core/seq/oss/seq_oss_event.c set_note_event(struct seq_oss_devinfo *dp, int dev, int type, int ch, int note, int vel, struct snd_seq_event *ev) dp 374 sound/core/seq/oss/seq_oss_event.c if (!snd_seq_oss_synth_info(dp, dev)) dp 378 sound/core/seq/oss/seq_oss_event.c snd_seq_oss_synth_addr(dp, dev, ev); dp 390 sound/core/seq/oss/seq_oss_event.c set_control_event(struct seq_oss_devinfo *dp, int dev, int type, int ch, int param, int val, struct snd_seq_event *ev) dp 392 sound/core/seq/oss/seq_oss_event.c if (!snd_seq_oss_synth_info(dp, dev)) dp 396 sound/core/seq/oss/seq_oss_event.c snd_seq_oss_synth_addr(dp, dev, ev); dp 408 sound/core/seq/oss/seq_oss_event.c set_echo_event(struct seq_oss_devinfo *dp, union evrec *rec, struct snd_seq_event *ev) dp 412 sound/core/seq/oss/seq_oss_event.c snd_seq_oss_fill_addr(dp, ev, dp->addr.client, dp->addr.port); dp 425 sound/core/seq/oss/seq_oss_event.c struct seq_oss_devinfo *dp = (struct seq_oss_devinfo *)private_data; dp 431 sound/core/seq/oss/seq_oss_event.c if (ev->source.client != dp->cseq) dp 437 sound/core/seq/oss/seq_oss_event.c 
snd_seq_oss_writeq_wakeup(dp->writeq, rec->t.time); dp 441 sound/core/seq/oss/seq_oss_event.c if (dp->readq == NULL) dp 443 sound/core/seq/oss/seq_oss_event.c snd_seq_oss_readq_put_event(dp->readq, rec); dp 94 sound/core/seq/oss/seq_oss_event.h int snd_seq_oss_process_event(struct seq_oss_devinfo *dp, union evrec *q, struct snd_seq_event *ev); dp 42 sound/core/seq/oss/seq_oss_init.c static int create_port(struct seq_oss_devinfo *dp); dp 43 sound/core/seq/oss/seq_oss_init.c static int delete_port(struct seq_oss_devinfo *dp); dp 44 sound/core/seq/oss/seq_oss_init.c static int alloc_seq_queue(struct seq_oss_devinfo *dp); dp 175 sound/core/seq/oss/seq_oss_init.c struct seq_oss_devinfo *dp; dp 177 sound/core/seq/oss/seq_oss_init.c dp = kzalloc(sizeof(*dp), GFP_KERNEL); dp 178 sound/core/seq/oss/seq_oss_init.c if (!dp) dp 181 sound/core/seq/oss/seq_oss_init.c dp->cseq = system_client; dp 182 sound/core/seq/oss/seq_oss_init.c dp->port = -1; dp 183 sound/core/seq/oss/seq_oss_init.c dp->queue = -1; dp 190 sound/core/seq/oss/seq_oss_init.c dp->index = i; dp 198 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_synth_setup(dp); dp 199 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_midi_setup(dp); dp 201 sound/core/seq/oss/seq_oss_init.c if (dp->synth_opened == 0 && dp->max_mididev == 0) { dp 208 sound/core/seq/oss/seq_oss_init.c rc = create_port(dp); dp 215 sound/core/seq/oss/seq_oss_init.c rc = alloc_seq_queue(dp); dp 220 sound/core/seq/oss/seq_oss_init.c dp->addr.client = dp->cseq; dp 221 sound/core/seq/oss/seq_oss_init.c dp->addr.port = dp->port; dp 225 sound/core/seq/oss/seq_oss_init.c dp->seq_mode = level; dp 228 sound/core/seq/oss/seq_oss_init.c dp->file_mode = translate_mode(file); dp 231 sound/core/seq/oss/seq_oss_init.c if (is_read_mode(dp->file_mode)) { dp 232 sound/core/seq/oss/seq_oss_init.c dp->readq = snd_seq_oss_readq_new(dp, maxqlen); dp 233 sound/core/seq/oss/seq_oss_init.c if (!dp->readq) { dp 240 sound/core/seq/oss/seq_oss_init.c if (is_write_mode(dp->file_mode)) { dp 241 sound/core/seq/oss/seq_oss_init.c dp->writeq = snd_seq_oss_writeq_new(dp, maxqlen); dp 242 sound/core/seq/oss/seq_oss_init.c if (!dp->writeq) { dp 249 sound/core/seq/oss/seq_oss_init.c dp->timer = snd_seq_oss_timer_new(dp); dp 250 sound/core/seq/oss/seq_oss_init.c if (!dp->timer) { dp 257 sound/core/seq/oss/seq_oss_init.c file->private_data = dp; dp 261 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_synth_setup_midi(dp); dp 262 sound/core/seq/oss/seq_oss_init.c else if (is_read_mode(dp->file_mode)) dp 263 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_midi_open_all(dp, SNDRV_SEQ_OSS_FILE_READ); dp 265 sound/core/seq/oss/seq_oss_init.c client_table[dp->index] = dp; dp 271 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_synth_cleanup(dp); dp 272 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_midi_cleanup(dp); dp 273 sound/core/seq/oss/seq_oss_init.c delete_seq_queue(dp->queue); dp 274 sound/core/seq/oss/seq_oss_init.c delete_port(dp); dp 300 sound/core/seq/oss/seq_oss_init.c create_port(struct seq_oss_devinfo *dp) dp 307 sound/core/seq/oss/seq_oss_init.c port.addr.client = dp->cseq; dp 308 sound/core/seq/oss/seq_oss_init.c sprintf(port.name, "Sequencer-%d", dp->index); dp 316 sound/core/seq/oss/seq_oss_init.c callback.private_data = dp; dp 325 sound/core/seq/oss/seq_oss_init.c dp->port = port.addr.port; dp 334 sound/core/seq/oss/seq_oss_init.c delete_port(struct seq_oss_devinfo *dp) dp 336 sound/core/seq/oss/seq_oss_init.c if (dp->port < 0) { dp 337 sound/core/seq/oss/seq_oss_init.c kfree(dp); dp 341 
sound/core/seq/oss/seq_oss_init.c return snd_seq_event_port_detach(dp->cseq, dp->port); dp 348 sound/core/seq/oss/seq_oss_init.c alloc_seq_queue(struct seq_oss_devinfo *dp) dp 359 sound/core/seq/oss/seq_oss_init.c dp->queue = qinfo.queue; dp 389 sound/core/seq/oss/seq_oss_init.c struct seq_oss_devinfo *dp = (struct seq_oss_devinfo *)private; dp 391 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_timer_delete(dp->timer); dp 393 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_writeq_delete(dp->writeq); dp 395 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_readq_delete(dp->readq); dp 397 sound/core/seq/oss/seq_oss_init.c kfree(dp); dp 405 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_release(struct seq_oss_devinfo *dp) dp 409 sound/core/seq/oss/seq_oss_init.c client_table[dp->index] = NULL; dp 412 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_reset(dp); dp 414 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_synth_cleanup(dp); dp 415 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_midi_cleanup(dp); dp 418 sound/core/seq/oss/seq_oss_init.c queue = dp->queue; dp 419 sound/core/seq/oss/seq_oss_init.c if (dp->port >= 0) dp 420 sound/core/seq/oss/seq_oss_init.c delete_port(dp); dp 429 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_reset(struct seq_oss_devinfo *dp) dp 434 sound/core/seq/oss/seq_oss_init.c for (i = 0; i < dp->max_synthdev; i++) dp 435 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_synth_reset(dp, i); dp 438 sound/core/seq/oss/seq_oss_init.c if (dp->seq_mode != SNDRV_SEQ_OSS_MODE_MUSIC) { dp 439 sound/core/seq/oss/seq_oss_init.c for (i = 0; i < dp->max_mididev; i++) dp 440 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_midi_reset(dp, i); dp 444 sound/core/seq/oss/seq_oss_init.c if (dp->readq) dp 445 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_readq_clear(dp->readq); dp 446 sound/core/seq/oss/seq_oss_init.c if (dp->writeq) dp 447 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_writeq_clear(dp->writeq); dp 450 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_timer_stop(dp->timer); dp 480 sound/core/seq/oss/seq_oss_init.c struct seq_oss_devinfo *dp; dp 488 sound/core/seq/oss/seq_oss_init.c if ((dp = client_table[i]) == NULL) { dp 492 sound/core/seq/oss/seq_oss_init.c snd_iprintf(buf, "port %d : queue %d\n", dp->port, dp->queue); dp 494 sound/core/seq/oss/seq_oss_init.c (dp->seq_mode ? "music" : "synth"), dp 495 sound/core/seq/oss/seq_oss_init.c filemode_str(dp->file_mode)); dp 496 sound/core/seq/oss/seq_oss_init.c if (dp->seq_mode) dp 498 sound/core/seq/oss/seq_oss_init.c dp->timer->oss_tempo, dp->timer->oss_timebase); dp 500 sound/core/seq/oss/seq_oss_init.c if (is_read_mode(dp->file_mode) && dp->readq) dp 501 sound/core/seq/oss/seq_oss_init.c snd_seq_oss_readq_info_read(dp->readq, buf); dp 18 sound/core/seq/oss/seq_oss_ioctl.c static int snd_seq_oss_synth_info_user(struct seq_oss_devinfo *dp, void __user *arg) dp 24 sound/core/seq/oss/seq_oss_ioctl.c if (snd_seq_oss_synth_make_info(dp, info.device, &info) < 0) dp 31 sound/core/seq/oss/seq_oss_ioctl.c static int snd_seq_oss_midi_info_user(struct seq_oss_devinfo *dp, void __user *arg) dp 37 sound/core/seq/oss/seq_oss_ioctl.c if (snd_seq_oss_midi_make_info(dp, info.device, &info) < 0) dp 44 sound/core/seq/oss/seq_oss_ioctl.c static int snd_seq_oss_oob_user(struct seq_oss_devinfo *dp, void __user *arg) dp 52 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_fill_addr(dp, &tmpev, dp->addr.client, dp->addr.port); dp 54 sound/core/seq/oss/seq_oss_ioctl.c if (! 
snd_seq_oss_process_event(dp, (union evrec *)ev, &tmpev)) { dp 55 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_dispatch(dp, &tmpev, 0, 0); dp 61 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_ioctl(struct seq_oss_devinfo *dp, unsigned int cmd, unsigned long carg) dp 77 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_timer_ioctl(dp->timer, cmd, arg); dp 80 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_reset(dp); dp 84 sound/core/seq/oss/seq_oss_ioctl.c if (! is_write_mode(dp->file_mode) || dp->writeq == NULL) dp 86 sound/core/seq/oss/seq_oss_ioctl.c while (snd_seq_oss_writeq_sync(dp->writeq)) dp 93 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_reset(dp); dp 99 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_midi_open(dp, dev, dp->file_mode); dp 102 sound/core/seq/oss/seq_oss_ioctl.c if (dp->readq == NULL || ! is_read_mode(dp->file_mode)) dp 104 sound/core/seq/oss/seq_oss_ioctl.c return put_user(dp->readq->qlen, p) ? -EFAULT : 0; dp 107 sound/core/seq/oss/seq_oss_ioctl.c if (! is_write_mode(dp->file_mode) || dp->writeq == NULL) dp 109 sound/core/seq/oss/seq_oss_ioctl.c return put_user(snd_seq_oss_writeq_get_free_size(dp->writeq), p) ? -EFAULT : 0; dp 112 sound/core/seq/oss/seq_oss_ioctl.c return put_user(snd_seq_oss_timer_cur_tick(dp->timer), p) ? -EFAULT : 0; dp 117 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_synth_ioctl(dp, dev, cmd, carg); dp 120 sound/core/seq/oss/seq_oss_ioctl.c return put_user(dp->max_synthdev, p) ? -EFAULT : 0; dp 123 sound/core/seq/oss/seq_oss_ioctl.c return put_user(dp->max_mididev, p) ? -EFAULT : 0; dp 128 sound/core/seq/oss/seq_oss_ioctl.c val = snd_seq_oss_synth_ioctl(dp, dev, cmd, carg); dp 134 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_synth_ioctl(dp, dev, cmd, carg); dp 139 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_synth_info_user(dp, arg); dp 142 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_oob_user(dp, arg); dp 145 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_midi_info_user(dp, arg); dp 148 sound/core/seq/oss/seq_oss_ioctl.c if (! is_write_mode(dp->file_mode)) dp 154 sound/core/seq/oss/seq_oss_ioctl.c if (val >= dp->writeq->maxlen) dp 155 sound/core/seq/oss/seq_oss_ioctl.c val = dp->writeq->maxlen - 1; dp 156 sound/core/seq/oss/seq_oss_ioctl.c snd_seq_oss_writeq_set_output(dp->writeq, val); dp 160 sound/core/seq/oss/seq_oss_ioctl.c if (dp->readq == NULL || !is_read_mode(dp->file_mode)) dp 168 sound/core/seq/oss/seq_oss_ioctl.c dp->readq->pre_event_timeout = val; dp 172 sound/core/seq/oss/seq_oss_ioctl.c if (! 
is_write_mode(dp->file_mode)) dp 174 sound/core/seq/oss/seq_oss_ioctl.c return snd_seq_oss_synth_ioctl(dp, 0, cmd, carg); dp 55 sound/core/seq/oss/seq_oss_midi.c static struct seq_oss_midi *get_mididev(struct seq_oss_devinfo *dp, int dev); dp 56 sound/core/seq/oss/seq_oss_midi.c static int send_synth_event(struct seq_oss_devinfo *dp, struct snd_seq_event *ev, int dev); dp 57 sound/core/seq/oss/seq_oss_midi.c static int send_midi_event(struct seq_oss_devinfo *dp, struct snd_seq_event *ev, struct seq_oss_midi *mdev); dp 268 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_setup(struct seq_oss_devinfo *dp) dp 270 sound/core/seq/oss/seq_oss_midi.c dp->max_mididev = max_midi_devs; dp 277 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_cleanup(struct seq_oss_devinfo *dp) dp 280 sound/core/seq/oss/seq_oss_midi.c for (i = 0; i < dp->max_mididev; i++) dp 281 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_close(dp, i); dp 282 sound/core/seq/oss/seq_oss_midi.c dp->max_mididev = 0; dp 290 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_open_all(struct seq_oss_devinfo *dp, int file_mode) dp 293 sound/core/seq/oss/seq_oss_midi.c for (i = 0; i < dp->max_mididev; i++) dp 294 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_open(dp, i, file_mode); dp 302 sound/core/seq/oss/seq_oss_midi.c get_mididev(struct seq_oss_devinfo *dp, int dev) dp 304 sound/core/seq/oss/seq_oss_midi.c if (dev < 0 || dev >= dp->max_mididev) dp 306 sound/core/seq/oss/seq_oss_midi.c dev = array_index_nospec(dev, dp->max_mididev); dp 315 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_open(struct seq_oss_devinfo *dp, int dev, int fmode) dp 321 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 325 sound/core/seq/oss/seq_oss_midi.c if (mdev->opened && mdev->devinfo != dp) { dp 352 sound/core/seq/oss/seq_oss_midi.c subs.sender = dp->addr; dp 355 sound/core/seq/oss/seq_oss_midi.c if (snd_seq_kernel_client_ctl(dp->cseq, SNDRV_SEQ_IOCTL_SUBSCRIBE_PORT, &subs) >= 0) dp 361 sound/core/seq/oss/seq_oss_midi.c subs.dest = dp->addr; dp 363 sound/core/seq/oss/seq_oss_midi.c subs.queue = dp->queue; /* queue for timestamps */ dp 364 sound/core/seq/oss/seq_oss_midi.c if (snd_seq_kernel_client_ctl(dp->cseq, SNDRV_SEQ_IOCTL_SUBSCRIBE_PORT, &subs) >= 0) dp 373 sound/core/seq/oss/seq_oss_midi.c mdev->devinfo = dp; dp 382 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_close(struct seq_oss_devinfo *dp, int dev) dp 387 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 389 sound/core/seq/oss/seq_oss_midi.c if (! 
mdev->opened || mdev->devinfo != dp) { dp 396 sound/core/seq/oss/seq_oss_midi.c subs.sender = dp->addr; dp 399 sound/core/seq/oss/seq_oss_midi.c snd_seq_kernel_client_ctl(dp->cseq, SNDRV_SEQ_IOCTL_UNSUBSCRIBE_PORT, &subs); dp 404 sound/core/seq/oss/seq_oss_midi.c subs.dest = dp->addr; dp 405 sound/core/seq/oss/seq_oss_midi.c snd_seq_kernel_client_ctl(dp->cseq, SNDRV_SEQ_IOCTL_UNSUBSCRIBE_PORT, &subs); dp 419 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_filemode(struct seq_oss_devinfo *dp, int dev) dp 424 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 442 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_reset(struct seq_oss_devinfo *dp, int dev) dp 446 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 460 sound/core/seq/oss/seq_oss_midi.c ev.queue = dp->queue; dp 461 sound/core/seq/oss/seq_oss_midi.c ev.source.port = dp->port; dp 462 sound/core/seq/oss/seq_oss_midi.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_SYNTH) { dp 464 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_dispatch(dp, &ev, 0, 0); dp 470 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_dispatch(dp, &ev, 0, 0); dp 471 sound/core/seq/oss/seq_oss_midi.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) { dp 474 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_dispatch(dp, &ev, 0, 0); dp 477 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_dispatch(dp, &ev, 0, 0); dp 490 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_get_addr(struct seq_oss_devinfo *dp, int dev, struct snd_seq_addr *addr) dp 494 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 508 sound/core/seq/oss/seq_oss_midi.c struct seq_oss_devinfo *dp = (struct seq_oss_devinfo *)private_data; dp 512 sound/core/seq/oss/seq_oss_midi.c if (dp->readq == NULL) dp 521 sound/core/seq/oss/seq_oss_midi.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) dp 522 sound/core/seq/oss/seq_oss_midi.c rc = send_synth_event(dp, ev, mdev->seq_device); dp 524 sound/core/seq/oss/seq_oss_midi.c rc = send_midi_event(dp, ev, mdev); dp 534 sound/core/seq/oss/seq_oss_midi.c send_synth_event(struct seq_oss_devinfo *dp, struct snd_seq_event *ev, int dev) dp 585 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_readq_put_timestamp(dp->readq, ev->time.tick, dp->seq_mode); dp 586 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_readq_put_event(dp->readq, &ossev); dp 595 sound/core/seq/oss/seq_oss_midi.c send_midi_event(struct seq_oss_devinfo *dp, struct snd_seq_event *ev, struct seq_oss_midi *mdev) dp 600 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_readq_put_timestamp(dp->readq, ev->time.tick, dp->seq_mode); dp 601 sound/core/seq/oss/seq_oss_midi.c if (!dp->timer->running) dp 602 sound/core/seq/oss/seq_oss_midi.c len = snd_seq_oss_timer_start(dp->timer); dp 604 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_readq_sysex(dp->readq, mdev->seq_device, ev); dp 609 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_readq_puts(dp->readq, mdev->seq_device, msg, len); dp 622 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_putc(struct seq_oss_devinfo *dp, int dev, unsigned char c, struct snd_seq_event *ev) dp 626 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 629 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_fill_addr(dp, ev, mdev->client, mdev->port); dp 641 sound/core/seq/oss/seq_oss_midi.c snd_seq_oss_midi_make_info(struct seq_oss_devinfo *dp, int dev, struct midi_info *inf) dp 645 sound/core/seq/oss/seq_oss_midi.c if ((mdev = get_mididev(dp, dev)) == NULL) dp 21 sound/core/seq/oss/seq_oss_midi.h void 
snd_seq_oss_midi_setup(struct seq_oss_devinfo *dp); dp 22 sound/core/seq/oss/seq_oss_midi.h void snd_seq_oss_midi_cleanup(struct seq_oss_devinfo *dp); dp 24 sound/core/seq/oss/seq_oss_midi.h int snd_seq_oss_midi_open(struct seq_oss_devinfo *dp, int dev, int file_mode); dp 25 sound/core/seq/oss/seq_oss_midi.h void snd_seq_oss_midi_open_all(struct seq_oss_devinfo *dp, int file_mode); dp 26 sound/core/seq/oss/seq_oss_midi.h int snd_seq_oss_midi_close(struct seq_oss_devinfo *dp, int dev); dp 27 sound/core/seq/oss/seq_oss_midi.h void snd_seq_oss_midi_reset(struct seq_oss_devinfo *dp, int dev); dp 28 sound/core/seq/oss/seq_oss_midi.h int snd_seq_oss_midi_putc(struct seq_oss_devinfo *dp, int dev, unsigned char c, dp 31 sound/core/seq/oss/seq_oss_midi.h int snd_seq_oss_midi_filemode(struct seq_oss_devinfo *dp, int dev); dp 32 sound/core/seq/oss/seq_oss_midi.h int snd_seq_oss_midi_make_info(struct seq_oss_devinfo *dp, int dev, struct midi_info *inf); dp 33 sound/core/seq/oss/seq_oss_midi.h void snd_seq_oss_midi_get_addr(struct seq_oss_devinfo *dp, int dev, struct snd_seq_addr *addr); dp 33 sound/core/seq/oss/seq_oss_readq.c snd_seq_oss_readq_new(struct seq_oss_devinfo *dp, int maxlen) dp 29 sound/core/seq/oss/seq_oss_readq.h struct seq_oss_readq *snd_seq_oss_readq_new(struct seq_oss_devinfo *dp, int maxlen); dp 23 sound/core/seq/oss/seq_oss_rw.c static int insert_queue(struct seq_oss_devinfo *dp, union evrec *rec, struct file *opt); dp 31 sound/core/seq/oss/seq_oss_rw.c snd_seq_oss_read(struct seq_oss_devinfo *dp, char __user *buf, int count) dp 33 sound/core/seq/oss/seq_oss_rw.c struct seq_oss_readq *readq = dp->readq; dp 39 sound/core/seq/oss/seq_oss_rw.c if (readq == NULL || ! is_read_mode(dp->file_mode)) dp 46 sound/core/seq/oss/seq_oss_rw.c !is_nonblock_mode(dp->file_mode) && result == 0) { dp 83 sound/core/seq/oss/seq_oss_rw.c snd_seq_oss_write(struct seq_oss_devinfo *dp, const char __user *buf, int count, struct file *opt) dp 89 sound/core/seq/oss/seq_oss_rw.c if (! 
is_write_mode(dp->file_mode) || dp->writeq == NULL) dp 105 sound/core/seq/oss/seq_oss_rw.c return snd_seq_oss_synth_load_patch(dp, rec.s.dev, dp 111 sound/core/seq/oss/seq_oss_rw.c dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) { dp 127 sound/core/seq/oss/seq_oss_rw.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_MUSIC) { dp 135 sound/core/seq/oss/seq_oss_rw.c if ((err = insert_queue(dp, &rec, opt)) < 0) dp 151 sound/core/seq/oss/seq_oss_rw.c insert_queue(struct seq_oss_devinfo *dp, union evrec *rec, struct file *opt) dp 157 sound/core/seq/oss/seq_oss_rw.c if (snd_seq_oss_process_timer_event(dp->timer, rec)) dp 164 sound/core/seq/oss/seq_oss_rw.c snd_seq_oss_fill_addr(dp, &event, dp->addr.client, dp->addr.port); dp 166 sound/core/seq/oss/seq_oss_rw.c if (snd_seq_oss_process_event(dp, rec, &event)) dp 169 sound/core/seq/oss/seq_oss_rw.c event.time.tick = snd_seq_oss_timer_cur_tick(dp->timer); dp 170 sound/core/seq/oss/seq_oss_rw.c if (dp->timer->realtime || !dp->timer->running) dp 171 sound/core/seq/oss/seq_oss_rw.c snd_seq_oss_dispatch(dp, &event, 0, 0); dp 173 sound/core/seq/oss/seq_oss_rw.c rc = snd_seq_kernel_client_enqueue(dp->cseq, &event, opt, dp 174 sound/core/seq/oss/seq_oss_rw.c !is_nonblock_mode(dp->file_mode)); dp 184 sound/core/seq/oss/seq_oss_rw.c snd_seq_oss_poll(struct seq_oss_devinfo *dp, struct file *file, poll_table * wait) dp 189 sound/core/seq/oss/seq_oss_rw.c if (dp->readq && is_read_mode(dp->file_mode)) { dp 190 sound/core/seq/oss/seq_oss_rw.c if (snd_seq_oss_readq_poll(dp->readq, file, wait)) dp 195 sound/core/seq/oss/seq_oss_rw.c if (dp->writeq && is_write_mode(dp->file_mode)) { dp 196 sound/core/seq/oss/seq_oss_rw.c if (snd_seq_kernel_client_write_poll(dp->cseq, file, wait)) dp 73 sound/core/seq/oss/seq_oss_synth.c static struct seq_oss_synth *get_synthdev(struct seq_oss_devinfo *dp, int dev); dp 200 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_setup(struct seq_oss_devinfo *dp) dp 206 sound/core/seq/oss/seq_oss_synth.c dp->max_synthdev = max_synth_devs; dp 207 sound/core/seq/oss/seq_oss_synth.c dp->synth_opened = 0; dp 208 sound/core/seq/oss/seq_oss_synth.c memset(dp->synths, 0, sizeof(dp->synths)); dp 209 sound/core/seq/oss/seq_oss_synth.c for (i = 0; i < dp->max_synthdev; i++) { dp 217 sound/core/seq/oss/seq_oss_synth.c info = &dp->synths[i]; dp 218 sound/core/seq/oss/seq_oss_synth.c info->arg.app_index = dp->port; dp 219 sound/core/seq/oss/seq_oss_synth.c info->arg.file_mode = dp->file_mode; dp 220 sound/core/seq/oss/seq_oss_synth.c info->arg.seq_mode = dp->seq_mode; dp 221 sound/core/seq/oss/seq_oss_synth.c if (dp->seq_mode == SNDRV_SEQ_OSS_MODE_SYNTH) dp 248 sound/core/seq/oss/seq_oss_synth.c dp->synth_opened++; dp 259 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_setup_midi(struct seq_oss_devinfo *dp) dp 263 sound/core/seq/oss/seq_oss_synth.c if (dp->max_synthdev >= SNDRV_SEQ_OSS_MAX_SYNTH_DEVS) dp 266 sound/core/seq/oss/seq_oss_synth.c for (i = 0; i < dp->max_mididev; i++) { dp 268 sound/core/seq/oss/seq_oss_synth.c info = &dp->synths[dp->max_synthdev]; dp 269 sound/core/seq/oss/seq_oss_synth.c if (snd_seq_oss_midi_open(dp, i, dp->file_mode) < 0) dp 271 sound/core/seq/oss/seq_oss_synth.c info->arg.app_index = dp->port; dp 272 sound/core/seq/oss/seq_oss_synth.c info->arg.file_mode = dp->file_mode; dp 273 sound/core/seq/oss/seq_oss_synth.c info->arg.seq_mode = dp->seq_mode; dp 278 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_midi_get_addr(dp, i, &info->arg.addr); dp 281 sound/core/seq/oss/seq_oss_synth.c dp->max_synthdev++; dp 282 
sound/core/seq/oss/seq_oss_synth.c if (dp->max_synthdev >= SNDRV_SEQ_OSS_MAX_SYNTH_DEVS)
dp 293 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_cleanup(struct seq_oss_devinfo *dp)
dp 299 sound/core/seq/oss/seq_oss_synth.c if (snd_BUG_ON(dp->max_synthdev > SNDRV_SEQ_OSS_MAX_SYNTH_DEVS))
dp 301 sound/core/seq/oss/seq_oss_synth.c for (i = 0; i < dp->max_synthdev; i++) {
dp 302 sound/core/seq/oss/seq_oss_synth.c info = &dp->synths[i];
dp 307 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_midi_close(dp, info->midi_mapped);
dp 326 sound/core/seq/oss/seq_oss_synth.c dp->synth_opened = 0;
dp 327 sound/core/seq/oss/seq_oss_synth.c dp->max_synthdev = 0;
dp 331 sound/core/seq/oss/seq_oss_synth.c get_synthinfo_nospec(struct seq_oss_devinfo *dp, int dev)
dp 333 sound/core/seq/oss/seq_oss_synth.c if (dev < 0 || dev >= dp->max_synthdev)
dp 336 sound/core/seq/oss/seq_oss_synth.c return &dp->synths[dev];
dp 343 sound/core/seq/oss/seq_oss_synth.c get_synthdev(struct seq_oss_devinfo *dp, int dev)
dp 346 sound/core/seq/oss/seq_oss_synth.c struct seq_oss_synthinfo *info = get_synthinfo_nospec(dp, dev);
dp 390 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_reset(struct seq_oss_devinfo *dp, int dev)
dp 395 sound/core/seq/oss/seq_oss_synth.c info = get_synthinfo_nospec(dp, dev);
dp 404 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_midi_reset(dp, info->midi_mapped);
dp 406 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_midi_close(dp, dev);
dp 407 sound/core/seq/oss/seq_oss_synth.c if (snd_seq_oss_midi_open(dp, info->midi_mapped,
dp 408 sound/core/seq/oss/seq_oss_synth.c dp->file_mode) < 0) {
dp 427 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_fill_addr(dp, &ev, info->arg.addr.client,
dp 430 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_dispatch(dp, &ev, 0, 0);
dp 441 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_load_patch(struct seq_oss_devinfo *dp, int dev, int fmt,
dp 448 sound/core/seq/oss/seq_oss_synth.c info = get_synthinfo_nospec(dp, dev);
dp 454 sound/core/seq/oss/seq_oss_synth.c if ((rec = get_synthdev(dp, dev)) == NULL)
dp 469 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_info(struct seq_oss_devinfo *dp, int dev)
dp 473 sound/core/seq/oss/seq_oss_synth.c rec = get_synthdev(dp, dev);
dp 476 sound/core/seq/oss/seq_oss_synth.c return get_synthinfo_nospec(dp, dev);
dp 488 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_sysex(struct seq_oss_devinfo *dp, int dev, unsigned char *buf, struct snd_seq_event *ev)
dp 495 sound/core/seq/oss/seq_oss_synth.c info = snd_seq_oss_synth_info(dp, dev);
dp 532 sound/core/seq/oss/seq_oss_synth.c if (snd_seq_oss_synth_addr(dp, dev, ev))
dp 547 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_addr(struct seq_oss_devinfo *dp, int dev, struct snd_seq_event *ev)
dp 549 sound/core/seq/oss/seq_oss_synth.c struct seq_oss_synthinfo *info = snd_seq_oss_synth_info(dp, dev);
dp 553 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_fill_addr(dp, ev, info->arg.addr.client,
dp 563 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_ioctl(struct seq_oss_devinfo *dp, int dev, unsigned int cmd, unsigned long addr)
dp 569 sound/core/seq/oss/seq_oss_synth.c info = get_synthinfo_nospec(dp, dev);
dp 572 sound/core/seq/oss/seq_oss_synth.c if ((rec = get_synthdev(dp, dev)) == NULL)
dp 587 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_raw_event(struct seq_oss_devinfo *dp, int dev, unsigned char *data, struct snd_seq_event *ev)
dp 591 sound/core/seq/oss/seq_oss_synth.c info = snd_seq_oss_synth_info(dp, dev);
dp 596 sound/core/seq/oss/seq_oss_synth.c return snd_seq_oss_synth_addr(dp, dev, ev);
dp 604 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_synth_make_info(struct seq_oss_devinfo *dp, int dev, struct synth_info *inf)
dp 607 sound/core/seq/oss/seq_oss_synth.c struct seq_oss_synthinfo *info = get_synthinfo_nospec(dp, dev);
dp 614 sound/core/seq/oss/seq_oss_synth.c snd_seq_oss_midi_make_info(dp, info->midi_mapped, &minf);
dp 621 sound/core/seq/oss/seq_oss_synth.c if ((rec = get_synthdev(dp, dev)) == NULL)
dp 20 sound/core/seq/oss/seq_oss_synth.h void snd_seq_oss_synth_setup(struct seq_oss_devinfo *dp);
dp 21 sound/core/seq/oss/seq_oss_synth.h void snd_seq_oss_synth_setup_midi(struct seq_oss_devinfo *dp);
dp 22 sound/core/seq/oss/seq_oss_synth.h void snd_seq_oss_synth_cleanup(struct seq_oss_devinfo *dp);
dp 24 sound/core/seq/oss/seq_oss_synth.h void snd_seq_oss_synth_reset(struct seq_oss_devinfo *dp, int dev);
dp 25 sound/core/seq/oss/seq_oss_synth.h int snd_seq_oss_synth_load_patch(struct seq_oss_devinfo *dp, int dev, int fmt,
dp 27 sound/core/seq/oss/seq_oss_synth.h struct seq_oss_synthinfo *snd_seq_oss_synth_info(struct seq_oss_devinfo *dp,
dp 29 sound/core/seq/oss/seq_oss_synth.h int snd_seq_oss_synth_sysex(struct seq_oss_devinfo *dp, int dev, unsigned char *buf,
dp 31 sound/core/seq/oss/seq_oss_synth.h int snd_seq_oss_synth_addr(struct seq_oss_devinfo *dp, int dev, struct snd_seq_event *ev);
dp 32 sound/core/seq/oss/seq_oss_synth.h int snd_seq_oss_synth_ioctl(struct seq_oss_devinfo *dp, int dev, unsigned int cmd,
dp 34 sound/core/seq/oss/seq_oss_synth.h int snd_seq_oss_synth_raw_event(struct seq_oss_devinfo *dp, int dev,
dp 37 sound/core/seq/oss/seq_oss_synth.h int snd_seq_oss_synth_make_info(struct seq_oss_devinfo *dp, int dev, struct synth_info *inf);
dp 25 sound/core/seq/oss/seq_oss_timer.c static int send_timer_event(struct seq_oss_devinfo *dp, int type, int value);
dp 33 sound/core/seq/oss/seq_oss_timer.c snd_seq_oss_timer_new(struct seq_oss_devinfo *dp)
dp 41 sound/core/seq/oss/seq_oss_timer.c rec->dp = dp;
dp 127 sound/core/seq/oss/seq_oss_timer.c send_timer_event(struct seq_oss_devinfo *dp, int type, int value)
dp 133 sound/core/seq/oss/seq_oss_timer.c ev.source.client = dp->cseq;
dp 137 sound/core/seq/oss/seq_oss_timer.c ev.queue = dp->queue;
dp 138 sound/core/seq/oss/seq_oss_timer.c ev.data.queue.queue = dp->queue;
dp 140 sound/core/seq/oss/seq_oss_timer.c return snd_seq_kernel_client_dispatch(dp->cseq, &ev, 1, 0);
dp 149 sound/core/seq/oss/seq_oss_timer.c struct seq_oss_devinfo *dp = timer->dp;
dp 156 sound/core/seq/oss/seq_oss_timer.c tmprec.queue = dp->queue;
dp 159 sound/core/seq/oss/seq_oss_timer.c snd_seq_set_queue_tempo(dp->cseq, &tmprec);
dp 161 sound/core/seq/oss/seq_oss_timer.c send_timer_event(dp, SNDRV_SEQ_EVENT_START, 0);
dp 176 sound/core/seq/oss/seq_oss_timer.c send_timer_event(timer->dp, SNDRV_SEQ_EVENT_STOP, 0);
dp 190 sound/core/seq/oss/seq_oss_timer.c send_timer_event(timer->dp, SNDRV_SEQ_EVENT_CONTINUE, 0);
dp 209 sound/core/seq/oss/seq_oss_timer.c send_timer_event(timer->dp, SNDRV_SEQ_EVENT_TEMPO, timer->tempo);
dp 232 sound/core/seq/oss/seq_oss_timer.c if (timer->dp->seq_mode == SNDRV_SEQ_OSS_MODE_SYNTH)
dp 18 sound/core/seq/oss/seq_oss_timer.h struct seq_oss_devinfo *dp;
dp 27 sound/core/seq/oss/seq_oss_timer.h struct seq_oss_timer *snd_seq_oss_timer_new(struct seq_oss_devinfo *dp);
dp 28 sound/core/seq/oss/seq_oss_timer.h void snd_seq_oss_timer_delete(struct seq_oss_timer *dp);
dp 25 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_writeq_new(struct seq_oss_devinfo *dp, int maxlen)
dp 32 sound/core/seq/oss/seq_oss_writeq.c q->dp = dp;
dp 40 sound/core/seq/oss/seq_oss_writeq.c pool.client = dp->cseq;
dp 44 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_control(dp, SNDRV_SEQ_IOCTL_SET_CLIENT_POOL, &pool);
dp 72 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_REMOVE_EVENTS, &reset);
dp 84 sound/core/seq/oss/seq_oss_writeq.c struct seq_oss_devinfo *dp = q->dp;
dp 87 sound/core/seq/oss/seq_oss_writeq.c time = snd_seq_oss_timer_cur_tick(dp->timer);
dp 101 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_fill_addr(dp, &ev, dp->addr.client, dp->addr.port);
dp 106 sound/core/seq/oss/seq_oss_writeq.c snd_seq_kernel_client_enqueue(dp->cseq, &ev, NULL, true);
dp 141 sound/core/seq/oss/seq_oss_writeq.c pool.client = q->dp->cseq;
dp 142 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_GET_CLIENT_POOL, &pool);
dp 154 sound/core/seq/oss/seq_oss_writeq.c pool.client = q->dp->cseq;
dp 155 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_GET_CLIENT_POOL, &pool);
dp 157 sound/core/seq/oss/seq_oss_writeq.c snd_seq_oss_control(q->dp, SNDRV_SEQ_IOCTL_SET_CLIENT_POOL, &pool);
dp 16 sound/core/seq/oss/seq_oss_writeq.h struct seq_oss_devinfo *dp;
dp 28 sound/core/seq/oss/seq_oss_writeq.h struct seq_oss_writeq *snd_seq_oss_writeq_new(struct seq_oss_devinfo *dp, int maxlen);
dp 109 sound/core/seq/seq_ports.h struct snd_seq_client *d, struct snd_seq_client_port *dp,
dp 115 sound/core/seq/seq_ports.h struct snd_seq_client *d, struct snd_seq_client_port *dp,
dp 226 sound/pci/hda/patch_hdmi.c struct dp_audio_infoframe dp;
dp 680 sound/pci/hda/patch_hdmi.c struct dp_audio_infoframe *dp_ai = &ai.dp;
dp 166 tools/gpio/lsgpio.c DIR *dp;
dp 169 tools/gpio/lsgpio.c dp = opendir("/dev");
dp 170 tools/gpio/lsgpio.c if (!dp) {
dp 176 tools/gpio/lsgpio.c while (ent = readdir(dp), ent) {
dp 185 tools/gpio/lsgpio.c if (closedir(dp) == -1) {
dp 205 tools/iio/iio_generic_buffer.c DIR *dp;
dp 212 tools/iio/iio_generic_buffer.c dp = opendir(scanelemdir);
dp 213 tools/iio/iio_generic_buffer.c if (!dp) {
dp 220 tools/iio/iio_generic_buffer.c while (ent = readdir(dp), ent) {
dp 233 tools/iio/iio_generic_buffer.c if (closedir(dp) == -1) {
dp 92 tools/iio/iio_utils.c DIR *dp;
dp 113 tools/iio/iio_utils.c dp = opendir(scan_el_dir);
dp 114 tools/iio/iio_utils.c if (!dp) {
dp 120 tools/iio/iio_utils.c while (ent = readdir(dp), ent)
dp 193 tools/iio/iio_utils.c if (closedir(dp) == -1)
dp 222 tools/iio/iio_utils.c DIR *dp;
dp 238 tools/iio/iio_utils.c dp = opendir(device_dir);
dp 239 tools/iio/iio_utils.c if (!dp) {
dp 245 tools/iio/iio_utils.c while (ent = readdir(dp), ent)
dp 272 tools/iio/iio_utils.c if (closedir(dp) == -1)
dp 314 tools/iio/iio_utils.c DIR *dp;
dp 328 tools/iio/iio_utils.c dp = opendir(scan_el_dir);
dp 329 tools/iio/iio_utils.c if (!dp) {
dp 334 tools/iio/iio_utils.c while (ent = readdir(dp), ent)
dp 378 tools/iio/iio_utils.c seekdir(dp, 0);
dp 379 tools/iio/iio_utils.c while (ent = readdir(dp), ent) {
dp 512 tools/iio/iio_utils.c if (closedir(dp) == -1) {
dp 532 tools/iio/iio_utils.c if (dp)
dp 533 tools/iio/iio_utils.c if (closedir(dp) == -1)
dp 569 tools/iio/iio_utils.c DIR *dp;
dp 573 tools/iio/iio_utils.c dp = opendir(iio_dir);
dp 574 tools/iio/iio_utils.c if (!dp) {
dp 579 tools/iio/iio_utils.c while (ent = readdir(dp), ent) {
dp 635 tools/iio/iio_utils.c if (closedir(dp) == -1)
dp 643 tools/iio/iio_utils.c if (closedir(dp) == -1)
dp 649 tools/iio/iio_utils.c if (closedir(dp) == -1)
dp 42 tools/iio/lsiio.c DIR *dp;
dp 45 tools/iio/lsiio.c dp = opendir(dev_dir_name);
dp 46 tools/iio/lsiio.c if (!dp)
dp 49 tools/iio/lsiio.c while (ent = readdir(dp), ent)
dp 55 tools/iio/lsiio.c return (closedir(dp) == -1) ? -errno : 0;
dp 105 tools/iio/lsiio.c DIR *dp;
dp 107 tools/iio/lsiio.c dp = opendir(iio_dir);
dp 108 tools/iio/lsiio.c if (!dp) {
dp 113 tools/iio/lsiio.c while (ent = readdir(dp), ent) {
dp 134 tools/iio/lsiio.c rewinddir(dp);
dp 135 tools/iio/lsiio.c while (ent = readdir(dp), ent) {
dp 155 tools/iio/lsiio.c return (closedir(dp) == -1) ? -errno : 0;
dp 158 tools/iio/lsiio.c if (closedir(dp) == -1)
dp 132 tools/testing/selftests/gpio/gpio-mockup-chardev.c DIR *dp;
dp 142 tools/testing/selftests/gpio/gpio-mockup-chardev.c dp = opendir("/dev");
dp 143 tools/testing/selftests/gpio/gpio-mockup-chardev.c if (!dp) {
dp 150 tools/testing/selftests/gpio/gpio-mockup-chardev.c while (ent = readdir(dp), ent) {
dp 188 tools/testing/selftests/gpio/gpio-mockup-chardev.c closedir(dp);
dp 43 tools/testing/selftests/powerpc/dscr/dscr_sysfs_test.c struct dirent *dp;
dp 52 tools/testing/selftests/powerpc/dscr/dscr_sysfs_test.c while ((dp = readdir(sysfs))) {
dp 55 tools/testing/selftests/powerpc/dscr/dscr_sysfs_test.c if (!(dp->d_type & DT_DIR))
dp 57 tools/testing/selftests/powerpc/dscr/dscr_sysfs_test.c if (!strcmp(dp->d_name, "cpuidle"))
dp 59 tools/testing/selftests/powerpc/dscr/dscr_sysfs_test.c if (!strstr(dp->d_name, "cpu"))
dp 62 tools/testing/selftests/powerpc/dscr/dscr_sysfs_test.c len = snprintf(file, LEN_MAX, "%s%s/dscr", CPU_PATH, dp->d_name);
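
Note: in the tools/gpio, tools/iio, and selftest entries above, dp is the DIR handle in an opendir()/readdir()/closedir() walk. The standalone sketch below illustrates only that idiom; it is not code from the files listed above, and the list_matching_entries() helper, the "/dev" directory, and the "gpiochip" name prefix are illustrative assumptions.

/* Minimal standalone sketch of the DIR *dp scanning idiom seen in the
 * tools/ entries above; the directory and name prefix are assumptions. */
#include <dirent.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>

static int list_matching_entries(const char *dir, const char *prefix)
{
	const struct dirent *ent;
	DIR *dp;
	int err;

	dp = opendir(dir);
	if (!dp) {
		err = errno;
		fprintf(stderr, "opendir %s failed: %s\n", dir, strerror(err));
		return -err;
	}

	/* Same comma-expression loop shape as in lsgpio.c/lsiio.c:
	 * keep reading entries until readdir() returns NULL. */
	while (ent = readdir(dp), ent) {
		if (!strncmp(ent->d_name, prefix, strlen(prefix)))
			printf("%s/%s\n", dir, ent->d_name);
	}

	if (closedir(dp) == -1) {
		err = errno;
		fprintf(stderr, "closedir %s failed: %s\n", dir, strerror(err));
		return -err;
	}
	return 0;
}

int main(void)
{
	/* Hypothetical usage: list /dev entries whose names start with "gpiochip". */
	return list_matching_entries("/dev", "gpiochip") ? 1 : 0;
}

Returning -errno on failure mirrors the convention visible in the lsiio.c lines above (return (closedir(dp) == -1) ? -errno : 0;).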