target 127 arch/alpha/include/asm/core_wildfire.h wildfire_64 target; target 176 arch/alpha/include/asm/core_wildfire.h wildfire_64 target; target 905 arch/alpha/kernel/core_cia.c const char *master, *target; target 910 arch/alpha/kernel/core_cia.c target = target_st_desc[(cia->pci_err0 >> 20) & 0xF]; target 923 arch/alpha/kernel/core_cia.c master, target); target 521 arch/alpha/kernel/core_wildfire.c pca->pca_int[i].target.csr, target 596 arch/alpha/kernel/core_wildfire.c i, iop->iop_dev_int[i].target.csr); target 78 arch/alpha/kernel/sys_wildfire.c target0 = (unsigned long *) &pca->pca_int[0].target; target 79 arch/alpha/kernel/sys_wildfire.c target1 = (unsigned long *) &pca->pca_int[1].target; target 80 arch/alpha/kernel/sys_wildfire.c target2 = (unsigned long *) &pca->pca_int[2].target; target 81 arch/alpha/kernel/sys_wildfire.c target3 = (unsigned long *) &pca->pca_int[3].target; target 81 arch/arc/include/asm/disasm.h int target; target 108 arch/arc/include/asm/disasm.h *cregs, unsigned long *fall_thru, unsigned long *target); target 77 arch/arc/kernel/disasm.c state->target = fieldA + (addr & ~0x3); target 97 arch/arc/kernel/disasm.c state->target = fieldA + (addr & ~0x3); target 183 arch/arc/kernel/disasm.c state->target = fieldC; target 187 arch/arc/kernel/disasm.c state->target = get_reg(fieldC, regs, cregs); target 203 arch/arc/kernel/disasm.c state->target = fieldC; target 304 arch/arc/kernel/disasm.c state->target = get_reg(FIELD_S_B(state->words[0]), target 318 arch/arc/kernel/disasm.c state->target = get_reg(31, regs, cregs); target 402 arch/arc/kernel/disasm.c state->target = FIELD_S_s8(state->words[0]) + (addr & ~0x03); target 411 arch/arc/kernel/disasm.c state->target = fieldA + (addr & ~0x03); target 417 arch/arc/kernel/disasm.c state->target = FIELD_S_s13(state->words[0]) + (addr & ~0x03); target 513 arch/arc/kernel/disasm.c *tgt_if_br = instr.target; target 19 arch/arc/kernel/ptrace.c static int genregs_get(struct task_struct *target, target 24 arch/arc/kernel/ptrace.c const struct pt_regs *ptregs = task_pt_regs(target); target 25 arch/arc/kernel/ptrace.c const struct callee_regs *cregs = task_callee_regs(target); target 88 arch/arc/kernel/ptrace.c REG_O_ONE(efa, &target->thread.fault_address); target 92 arch/arc/kernel/ptrace.c stop_pc_val = target->thread.fault_address; target 105 arch/arc/kernel/ptrace.c static int genregs_set(struct task_struct *target, target 110 arch/arc/kernel/ptrace.c const struct pt_regs *ptregs = task_pt_regs(target); target 111 arch/arc/kernel/ptrace.c const struct callee_regs *cregs = task_callee_regs(target); target 185 arch/arc/kernel/ptrace.c static int arcv2regs_get(struct task_struct *target, target 190 arch/arc/kernel/ptrace.c const struct pt_regs *regs = task_pt_regs(target); target 208 arch/arc/kernel/ptrace.c static int arcv2regs_set(struct task_struct *target, target 213 arch/arc/kernel/ptrace.c const struct pt_regs *regs = task_pt_regs(target); target 44 arch/arm/include/asm/jump_label.h jump_label_t target; target 13 arch/arm/include/asm/kvm_coproc.h unsigned target; target 165 arch/arm/include/asm/kvm_host.h int target; /* Processor target */ target 105 arch/arm/include/uapi/asm/kvm.h __u32 target; target 642 arch/arm/kernel/hw_breakpoint.c if (!bp->hw.target) target 15 arch/arm/kernel/jump_label.c insn = arm_gen_branch(entry->code, entry->target); target 571 arch/arm/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 576 arch/arm/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 583 
arch/arm/kernel/ptrace.c static int gpr_set(struct task_struct *target, target 589 arch/arm/kernel/ptrace.c struct pt_regs newregs = *task_pt_regs(target); target 600 arch/arm/kernel/ptrace.c *task_pt_regs(target) = newregs; target 604 arch/arm/kernel/ptrace.c static int fpa_get(struct task_struct *target, target 610 arch/arm/kernel/ptrace.c &task_thread_info(target)->fpstate, target 614 arch/arm/kernel/ptrace.c static int fpa_set(struct task_struct *target, target 619 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(target); target 651 arch/arm/kernel/ptrace.c static int vfp_get(struct task_struct *target, target 657 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(target); target 688 arch/arm/kernel/ptrace.c static int vfp_set(struct task_struct *target, target 694 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(target); target 524 arch/arm/kernel/smp.c static void smp_cross_call(const struct cpumask *target, unsigned int ipinr) target 526 arch/arm/kernel/smp.c trace_ipi_raise_rcuidle(target, ipi_types[ipinr]); target 527 arch/arm/kernel/smp.c __smp_cross_call(target, ipinr); target 531 arch/arm/kvm/coproc.c target_tables[table->target] = table; target 535 arch/arm/kvm/coproc.c static const struct coproc_reg *get_target_table(unsigned target, size_t *num) target 539 arch/arm/kvm/coproc.c table = target_tables[target]; target 581 arch/arm/kvm/coproc.c table = get_target_table(vcpu->arch.target, &num); target 780 arch/arm/kvm/coproc.c table = get_target_table(vcpu->arch.target, &num); target 1335 arch/arm/kvm/coproc.c i1 = get_target_table(vcpu->arch.target, &num); target 1449 arch/arm/kvm/coproc.c table = get_target_table(vcpu->arch.target, &num); target 29 arch/arm/kvm/coproc_a15.c .target = KVM_ARM_TARGET_CORTEX_A15, target 32 arch/arm/kvm/coproc_a7.c .target = KVM_ARM_TARGET_CORTEX_A7, target 289 arch/arm/kvm/guest.c int target = kvm_target_cpu(); target 291 arch/arm/kvm/guest.c if (target < 0) target 302 arch/arm/kvm/guest.c init->target = (__u32)target; target 45 arch/arm/kvm/reset.c switch (vcpu->arch.target) { target 217 arch/arm/mach-imx/mmdc.c int target; target 222 arch/arm/mach-imx/mmdc.c target = cpumask_any_but(cpu_online_mask, cpu); target 223 arch/arm/mach-imx/mmdc.c if (target >= nr_cpu_ids) target 226 arch/arm/mach-imx/mmdc.c perf_pmu_migrate_context(&pmu_mmdc->pmu, cpu, target); target 227 arch/arm/mach-imx/mmdc.c cpumask_set_cpu(target, &pmu_mmdc->cpu); target 426 arch/arm/mm/cache-l2x0-pmu.c unsigned int target; target 431 arch/arm/mm/cache-l2x0-pmu.c target = cpumask_any_but(cpu_online_mask, cpu); target 432 arch/arm/mm/cache-l2x0-pmu.c if (target >= nr_cpu_ids) target 435 arch/arm/mm/cache-l2x0-pmu.c perf_pmu_migrate_context(l2x0_pmu, cpu, target); target 436 arch/arm/mm/cache-l2x0-pmu.c cpumask_set_cpu(target, &pmu_cpu); target 196 arch/arm/net/bpf_jit_32.c u32 *target; target 224 arch/arm/net/bpf_jit_32.c if (ctx->target != NULL) target 225 arch/arm/net/bpf_jit_32.c ctx->target[ctx->idx] = inst; target 360 arch/arm/net/bpf_jit_32.c if (ctx->target == NULL) { target 380 arch/arm/net/bpf_jit_32.c ctx->target[offset / 4] = k; target 403 arch/arm/net/bpf_jit_32.c if (ctx->target == NULL) target 457 arch/arm/net/bpf_jit_32.c if (ctx->target == NULL) target 1837 arch/arm/net/bpf_jit_32.c if (ctx->target == NULL) target 1842 arch/arm/net/bpf_jit_32.c if (ctx->target == NULL) target 1857 arch/arm/net/bpf_jit_32.c if (ctx->target[i] == __opcode_to_mem_arm(ARM_INST_UDF)) target 1976 arch/arm/net/bpf_jit_32.c 
ctx.target = (u32 *) image_ptr; target 1999 arch/arm/net/bpf_jit_32.c flush_icache_range((u32)header, (u32)(ctx.target + ctx.idx)); target 2003 arch/arm/net/bpf_jit_32.c bpf_jit_dump(prog->len, image_size, 2, ctx.target); target 2006 arch/arm/net/bpf_jit_32.c prog->bpf_func = (void *)ctx.target; target 39 arch/arm/plat-orion/include/plat/addr-map.h const u8 target; target 49 arch/arm/plat-orion/include/plat/addr-map.h const u32 size, const u8 target, target 52 arch/arm64/include/asm/fpsimd.h extern void fpsimd_flush_task_state(struct task_struct *target); target 455 arch/arm64/include/asm/insn.h enum aarch64_insn_prfm_target target, target 27 arch/arm64/include/asm/kvm_coproc.h void kvm_register_target_sys_reg_table(unsigned int target, target 326 arch/arm64/include/asm/kvm_host.h int target; target 111 arch/arm64/include/uapi/asm/kvm.h __u32 target; target 72 arch/arm64/kernel/alternative.c unsigned long target; target 74 arch/arm64/kernel/alternative.c target = (unsigned long)altinsnptr + offset; target 81 arch/arm64/kernel/alternative.c if (branch_insn_requires_update(alt, target)) { target 82 arch/arm64/kernel/alternative.c offset = target - (unsigned long)insnptr; target 87 arch/arm64/kernel/alternative.c unsigned long target; target 95 arch/arm64/kernel/alternative.c target = align_down(altinsnptr, SZ_4K) + orig_offset; target 96 arch/arm64/kernel/alternative.c new_offset = target - align_down(insnptr, SZ_4K); target 162 arch/arm64/kernel/hw_breakpoint.c struct task_struct *tsk = bp->hw.target; target 569 arch/arm64/kernel/hw_breakpoint.c if (hw->ctrl.privilege == AARCH64_BREAKPOINT_EL1 && bp->hw.target) target 767 arch/arm64/kernel/insn.c enum aarch64_insn_prfm_target target, target 787 arch/arm64/kernel/insn.c switch (target) { target 797 arch/arm64/kernel/insn.c pr_err("%s: unknown prfm target encoding %d\n", __func__, target); target 821 arch/arm64/kernel/insn.c enum aarch64_insn_prfm_target target, target 828 arch/arm64/kernel/insn.c insn = aarch64_insn_encode_prfm_imm(type, target, policy, insn); target 476 arch/arm64/kernel/ptrace.c static int hw_break_get(struct task_struct *target, target 507 arch/arm64/kernel/ptrace.c ret = ptrace_hbp_get_addr(note_type, target, idx, &addr); target 516 arch/arm64/kernel/ptrace.c ret = ptrace_hbp_get_ctrl(note_type, target, idx, &ctrl); target 537 arch/arm64/kernel/ptrace.c static int hw_break_set(struct task_struct *target, target 562 arch/arm64/kernel/ptrace.c ret = ptrace_hbp_set_addr(note_type, target, idx, addr); target 573 arch/arm64/kernel/ptrace.c ret = ptrace_hbp_set_ctrl(note_type, target, idx, ctrl); target 591 arch/arm64/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 596 arch/arm64/kernel/ptrace.c struct user_pt_regs *uregs = &task_pt_regs(target)->user_regs; target 600 arch/arm64/kernel/ptrace.c static int gpr_set(struct task_struct *target, const struct user_regset *regset, target 605 arch/arm64/kernel/ptrace.c struct user_pt_regs newregs = task_pt_regs(target)->user_regs; target 611 arch/arm64/kernel/ptrace.c if (!valid_user_regs(&newregs, target)) target 614 arch/arm64/kernel/ptrace.c task_pt_regs(target)->user_regs = newregs; target 618 arch/arm64/kernel/ptrace.c static int fpr_active(struct task_struct *target, const struct user_regset *regset) target 628 arch/arm64/kernel/ptrace.c static int __fpr_get(struct task_struct *target, target 635 arch/arm64/kernel/ptrace.c sve_sync_to_fpsimd(target); target 637 arch/arm64/kernel/ptrace.c uregs = &target->thread.uw.fpsimd_state; target 643 
arch/arm64/kernel/ptrace.c static int fpr_get(struct task_struct *target, const struct user_regset *regset, target 650 arch/arm64/kernel/ptrace.c if (target == current) target 653 arch/arm64/kernel/ptrace.c return __fpr_get(target, regset, pos, count, kbuf, ubuf, 0); target 656 arch/arm64/kernel/ptrace.c static int __fpr_set(struct task_struct *target, target 669 arch/arm64/kernel/ptrace.c sve_sync_to_fpsimd(target); target 671 arch/arm64/kernel/ptrace.c newstate = target->thread.uw.fpsimd_state; target 678 arch/arm64/kernel/ptrace.c target->thread.uw.fpsimd_state = newstate; target 683 arch/arm64/kernel/ptrace.c static int fpr_set(struct task_struct *target, const struct user_regset *regset, target 692 arch/arm64/kernel/ptrace.c ret = __fpr_set(target, regset, pos, count, kbuf, ubuf, 0); target 696 arch/arm64/kernel/ptrace.c sve_sync_from_fpsimd_zeropad(target); target 697 arch/arm64/kernel/ptrace.c fpsimd_flush_task_state(target); target 702 arch/arm64/kernel/ptrace.c static int tls_get(struct task_struct *target, const struct user_regset *regset, target 706 arch/arm64/kernel/ptrace.c unsigned long *tls = &target->thread.uw.tp_value; target 708 arch/arm64/kernel/ptrace.c if (target == current) target 714 arch/arm64/kernel/ptrace.c static int tls_set(struct task_struct *target, const struct user_regset *regset, target 719 arch/arm64/kernel/ptrace.c unsigned long tls = target->thread.uw.tp_value; target 725 arch/arm64/kernel/ptrace.c target->thread.uw.tp_value = tls; target 729 arch/arm64/kernel/ptrace.c static int system_call_get(struct task_struct *target, target 734 arch/arm64/kernel/ptrace.c int syscallno = task_pt_regs(target)->syscallno; target 740 arch/arm64/kernel/ptrace.c static int system_call_set(struct task_struct *target, target 745 arch/arm64/kernel/ptrace.c int syscallno = task_pt_regs(target)->syscallno; target 752 arch/arm64/kernel/ptrace.c task_pt_regs(target)->syscallno = syscallno; target 759 arch/arm64/kernel/ptrace.c struct task_struct *target) target 765 arch/arm64/kernel/ptrace.c header->flags = test_tsk_thread_flag(target, TIF_SVE) ? 
target 767 arch/arm64/kernel/ptrace.c if (test_tsk_thread_flag(target, TIF_SVE_VL_INHERIT)) target 770 arch/arm64/kernel/ptrace.c header->vl = target->thread.sve_vl; target 784 arch/arm64/kernel/ptrace.c static unsigned int sve_get_size(struct task_struct *target, target 792 arch/arm64/kernel/ptrace.c sve_init_header_from_task(&header, target); target 796 arch/arm64/kernel/ptrace.c static int sve_get(struct task_struct *target, target 810 arch/arm64/kernel/ptrace.c sve_init_header_from_task(&header, target); target 818 arch/arm64/kernel/ptrace.c if (target == current) target 825 arch/arm64/kernel/ptrace.c return __fpr_get(target, regset, pos, count, kbuf, ubuf, target 834 arch/arm64/kernel/ptrace.c target->thread.sve_state, target 853 arch/arm64/kernel/ptrace.c &target->thread.uw.fpsimd_state.fpsr, target 864 arch/arm64/kernel/ptrace.c static int sve_set(struct task_struct *target, target 889 arch/arm64/kernel/ptrace.c ret = sve_set_vector_length(target, header.vl, target 895 arch/arm64/kernel/ptrace.c vq = sve_vq_from_vl(target->thread.sve_vl); target 901 arch/arm64/kernel/ptrace.c ret = __fpr_set(target, regset, pos, count, kbuf, ubuf, target 903 arch/arm64/kernel/ptrace.c clear_tsk_thread_flag(target, TIF_SVE); target 919 arch/arm64/kernel/ptrace.c sve_alloc(target); target 926 arch/arm64/kernel/ptrace.c fpsimd_sync_to_sve(target); target 927 arch/arm64/kernel/ptrace.c set_tsk_thread_flag(target, TIF_SVE); target 933 arch/arm64/kernel/ptrace.c target->thread.sve_state, target 952 arch/arm64/kernel/ptrace.c &target->thread.uw.fpsimd_state.fpsr, target 956 arch/arm64/kernel/ptrace.c fpsimd_flush_task_state(target); target 963 arch/arm64/kernel/ptrace.c static int pac_mask_get(struct task_struct *target, target 1019 arch/arm64/kernel/ptrace.c static int pac_address_keys_get(struct task_struct *target, target 1024 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; target 1036 arch/arm64/kernel/ptrace.c static int pac_address_keys_set(struct task_struct *target, target 1041 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; target 1070 arch/arm64/kernel/ptrace.c static int pac_generic_keys_get(struct task_struct *target, target 1075 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; target 1087 arch/arm64/kernel/ptrace.c static int pac_generic_keys_set(struct task_struct *target, target 1092 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; target 1241 arch/arm64/kernel/ptrace.c static int compat_gpr_get(struct task_struct *target, target 1264 arch/arm64/kernel/ptrace.c reg = task_pt_regs(target)->pc; target 1267 arch/arm64/kernel/ptrace.c reg = task_pt_regs(target)->pstate; target 1271 arch/arm64/kernel/ptrace.c reg = task_pt_regs(target)->orig_x0; target 1274 arch/arm64/kernel/ptrace.c reg = task_pt_regs(target)->regs[idx]; target 1294 arch/arm64/kernel/ptrace.c static int compat_gpr_set(struct task_struct *target, target 1312 arch/arm64/kernel/ptrace.c newregs = *task_pt_regs(target); target 1348 arch/arm64/kernel/ptrace.c if (valid_user_regs(&newregs.user_regs, target)) target 1349 arch/arm64/kernel/ptrace.c *task_pt_regs(target) = newregs; target 1356 arch/arm64/kernel/ptrace.c static int compat_vfp_get(struct task_struct *target, target 1368 arch/arm64/kernel/ptrace.c uregs = &target->thread.uw.fpsimd_state; target 1370 arch/arm64/kernel/ptrace.c if (target == current) target 1392 arch/arm64/kernel/ptrace.c static int compat_vfp_set(struct task_struct *target, 
target 1404 arch/arm64/kernel/ptrace.c uregs = &target->thread.uw.fpsimd_state; target 1419 arch/arm64/kernel/ptrace.c fpsimd_flush_task_state(target); target 1423 arch/arm64/kernel/ptrace.c static int compat_tls_get(struct task_struct *target, target 1427 arch/arm64/kernel/ptrace.c compat_ulong_t tls = (compat_ulong_t)target->thread.uw.tp_value; target 1431 arch/arm64/kernel/ptrace.c static int compat_tls_set(struct task_struct *target, target 1437 arch/arm64/kernel/ptrace.c compat_ulong_t tls = target->thread.uw.tp_value; target 1443 arch/arm64/kernel/ptrace.c target->thread.uw.tp_value = tls; target 774 arch/arm64/kernel/smp.c static void smp_cross_call(const struct cpumask *target, unsigned int ipinr) target 776 arch/arm64/kernel/smp.c trace_ipi_raise(target, ipi_types[ipinr]); target 777 arch/arm64/kernel/smp.c __smp_cross_call(target, ipinr); target 778 arch/arm64/kvm/guest.c int target = kvm_target_cpu(); target 780 arch/arm64/kvm/guest.c if (target < 0) target 791 arch/arm64/kvm/guest.c init->target = (__u32)target; target 288 arch/arm64/kvm/reset.c switch (vcpu->arch.target) { target 1987 arch/arm64/kvm/sys_regs.c void kvm_register_target_sys_reg_table(unsigned int target, target 1990 arch/arm64/kvm/sys_regs.c target_tables[target] = table; target 1994 arch/arm64/kvm/sys_regs.c static const struct sys_reg_desc *get_target_table(unsigned target, target 2000 arch/arm64/kvm/sys_regs.c table = target_tables[target]; target 2211 arch/arm64/kvm/sys_regs.c target_specific = get_target_table(vcpu->arch.target, false, &num); target 2222 arch/arm64/kvm/sys_regs.c target_specific = get_target_table(vcpu->arch.target, false, &num); target 2248 arch/arm64/kvm/sys_regs.c table = get_target_table(vcpu->arch.target, true, &num); target 2372 arch/arm64/kvm/sys_regs.c table = get_target_table(vcpu->arch.target, true, &num); target 2681 arch/arm64/kvm/sys_regs.c i1 = get_target_table(vcpu->arch.target, true, &num); target 2801 arch/arm64/kvm/sys_regs.c table = get_target_table(vcpu->arch.target, true, &num); target 58 arch/c6x/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 63 arch/c6x/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 89 arch/csky/kernel/ftrace.c static int ftrace_modify_code(unsigned long hook, unsigned long target, target 97 arch/csky/kernel/ftrace.c make_jbsr(target, hook, call, nolr); target 72 arch/csky/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 79 arch/csky/kernel/ptrace.c regs = task_pt_regs(target); target 82 arch/csky/kernel/ptrace.c regs->tls = task_thread_info(target)->tp_value; target 87 arch/csky/kernel/ptrace.c static int gpr_set(struct task_struct *target, target 99 arch/csky/kernel/ptrace.c regs.sr = task_pt_regs(target)->sr; target 101 arch/csky/kernel/ptrace.c regs.dcsr = task_pt_regs(target)->dcsr; target 103 arch/csky/kernel/ptrace.c task_thread_info(target)->tp_value = regs.tls; target 105 arch/csky/kernel/ptrace.c *task_pt_regs(target) = regs; target 110 arch/csky/kernel/ptrace.c static int fpr_get(struct task_struct *target, target 115 arch/csky/kernel/ptrace.c struct user_fp *regs = (struct user_fp *)&target->thread.user_fp; target 135 arch/csky/kernel/ptrace.c static int fpr_set(struct task_struct *target, target 141 arch/csky/kernel/ptrace.c struct user_fp *regs = (struct user_fp *)&target->thread.user_fp; target 88 arch/h8300/kernel/ptrace.c static int regs_get(struct task_struct *target, target 100 arch/h8300/kernel/ptrace.c *reg++ = h8300_get_reg(target, r); target 106 
arch/h8300/kernel/ptrace.c static int regs_set(struct task_struct *target, target 119 arch/h8300/kernel/ptrace.c *reg++ = h8300_get_reg(target, r); target 128 arch/h8300/kernel/ptrace.c h8300_put_reg(target, r, *reg++); target 36 arch/hexagon/kernel/ptrace.c static int genregs_get(struct task_struct *target, target 43 arch/hexagon/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 92 arch/hexagon/kernel/ptrace.c static int genregs_set(struct task_struct *target, target 99 arch/hexagon/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 1266 arch/ia64/kernel/ptrace.c struct task_struct *target; target 1278 arch/ia64/kernel/ptrace.c access_elf_gpreg(struct task_struct *target, struct unw_frame_info *info, target 1286 arch/ia64/kernel/ptrace.c pt = task_pt_regs(target); target 1326 arch/ia64/kernel/ptrace.c access_elf_breg(struct task_struct *target, struct unw_frame_info *info, target 1332 arch/ia64/kernel/ptrace.c pt = task_pt_regs(target); target 1354 arch/ia64/kernel/ptrace.c access_elf_areg(struct task_struct *target, struct unw_frame_info *info, target 1361 arch/ia64/kernel/ptrace.c pt = task_pt_regs(target); target 1396 arch/ia64/kernel/ptrace.c urbs_end = ia64_get_user_rbs_end(target, pt, &cfm); target 1400 arch/ia64/kernel/ptrace.c convert_to_non_syscall(target, target 1449 arch/ia64/kernel/ptrace.c urbs_end = ia64_get_user_rbs_end(target, pt, &cfm); target 1453 arch/ia64/kernel/ptrace.c convert_to_non_syscall(target, target 1475 arch/ia64/kernel/ptrace.c return access_nat_bits(target, pt, info, target 1491 arch/ia64/kernel/ptrace.c access_elf_reg(struct task_struct *target, struct unw_frame_info *info, target 1495 arch/ia64/kernel/ptrace.c return access_elf_gpreg(target, info, addr, data, write_access); target 1497 arch/ia64/kernel/ptrace.c return access_elf_breg(target, info, addr, data, write_access); target 1499 arch/ia64/kernel/ptrace.c return access_elf_areg(target, info, addr, data, write_access); target 1541 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, target 1555 arch/ia64/kernel/ptrace.c pt = task_pt_regs(dst->target); target 1570 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, target 1591 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, target 1632 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, target 1643 arch/ia64/kernel/ptrace.c pt = task_pt_regs(dst->target); target 1661 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, target 1682 arch/ia64/kernel/ptrace.c if (access_elf_reg(dst->target, info, i, target 1695 arch/ia64/kernel/ptrace.c struct task_struct *task = dst->target; target 1734 arch/ia64/kernel/ptrace.c ia64_flush_fph(dst->target); target 1739 arch/ia64/kernel/ptrace.c &dst->target->thread.fph, target 1814 arch/ia64/kernel/ptrace.c ia64_sync_fph(dst->target); target 1818 arch/ia64/kernel/ptrace.c &dst->target->thread.fph, target 1825 arch/ia64/kernel/ptrace.c struct task_struct *target, target 1830 arch/ia64/kernel/ptrace.c struct regset_getset info = { .target = target, .regset = regset, target 1835 arch/ia64/kernel/ptrace.c if (target == current) target 1840 arch/ia64/kernel/ptrace.c unw_init_from_blocked_task(&ufi, target); target 1848 arch/ia64/kernel/ptrace.c gpregs_get(struct task_struct *target, target 1853 arch/ia64/kernel/ptrace.c return do_regset_call(do_gpregs_get, target, regset, pos, count, target 1857 arch/ia64/kernel/ptrace.c static int gpregs_set(struct task_struct *target, target 1862 arch/ia64/kernel/ptrace.c return 
do_regset_call(do_gpregs_set, target, regset, pos, count, target 1877 arch/ia64/kernel/ptrace.c gpregs_writeback(struct task_struct *target, target 1881 arch/ia64/kernel/ptrace.c if (test_and_set_tsk_thread_flag(target, TIF_RESTORE_RSE)) target 1883 arch/ia64/kernel/ptrace.c set_notify_resume(target); target 1884 arch/ia64/kernel/ptrace.c return do_regset_call(do_gpregs_writeback, target, regset, 0, 0, target 1889 arch/ia64/kernel/ptrace.c fpregs_active(struct task_struct *target, const struct user_regset *regset) target 1891 arch/ia64/kernel/ptrace.c return (target->thread.flags & IA64_THREAD_FPH_VALID) ? 128 : 32; target 1894 arch/ia64/kernel/ptrace.c static int fpregs_get(struct task_struct *target, target 1899 arch/ia64/kernel/ptrace.c return do_regset_call(do_fpregs_get, target, regset, pos, count, target 1903 arch/ia64/kernel/ptrace.c static int fpregs_set(struct task_struct *target, target 1908 arch/ia64/kernel/ptrace.c return do_regset_call(do_fpregs_set, target, regset, pos, count, target 98 arch/mips/ar7/clock.c static void approximate(int base, int target, int *prediv, target 101 arch/mips/ar7/clock.c int i, j, k, freq, res = target; target 105 arch/mips/ar7/clock.c freq = abs(base / j * i / k - target); target 115 arch/mips/ar7/clock.c static void calculate(int base, int target, int *prediv, int *postdiv, target 122 arch/mips/ar7/clock.c tmp_gcd = gcd(target, tmp_base); target 123 arch/mips/ar7/clock.c *mul = target / tmp_gcd; target 131 arch/mips/ar7/clock.c if (base / *prediv * *mul / *postdiv != target) { target 132 arch/mips/ar7/clock.c approximate(base, target, prediv, postdiv, mul); target 136 arch/mips/ar7/clock.c target, tmp_freq); target 206 arch/mips/include/asm/fpu.h static inline bool init_fp_ctx(struct task_struct *target) target 209 arch/mips/include/asm/fpu.h if (tsk_used_math(target)) target 213 arch/mips/include/asm/fpu.h memset(&target->thread.fpu.fpr, ~0, sizeof(target->thread.fpu.fpr)); target 222 arch/mips/include/asm/fpu.h set_stopped_child_used_math(target); target 299 arch/mips/include/asm/fpu.h static inline bool init_fp_ctx(struct task_struct *target) target 70 arch/mips/include/asm/jump_label.h jump_label_t target; target 305 arch/mips/include/asm/uasm.h u32 *first, u32 *end, u32 *target); target 628 arch/mips/include/uapi/asm/inst.h __BITFIELD_FIELD(unsigned int target : 26, target 236 arch/mips/kernel/branch.c *contpc |= (insn.j_format.target << 2); target 247 arch/mips/kernel/branch.c *contpc |= (insn.j_format.target << 1); target 569 arch/mips/kernel/branch.c epc |= (insn.j_format.target << 2); target 48 arch/mips/kernel/jump_label.c BUG_ON((e->target & J_ALIGN_MASK) != J_ISA_BIT); target 52 arch/mips/kernel/jump_label.c offset = e->target - ((unsigned long)insn_p + 4); target 63 arch/mips/kernel/jump_label.c insn.j_format.target = offset; target 69 arch/mips/kernel/jump_label.c WARN_ON((e->target & ~J_RANGE_MASK) != target 73 arch/mips/kernel/jump_label.c insn.j_format.target = e->target >> J_RANGE_SHIFT; target 200 arch/mips/kernel/process.c #define J_TARGET(pc,target) \ target 201 arch/mips/kernel/process.c (((unsigned long)(pc) & 0xf0000000) | ((target) << 2)) target 452 arch/mips/kernel/process.c return J_TARGET(ip, ip->j_format.target); target 212 arch/mips/kernel/ptrace.c static int gpr32_get(struct task_struct *target, target 217 arch/mips/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 225 arch/mips/kernel/ptrace.c static int gpr32_set(struct task_struct *target, target 230 arch/mips/kernel/ptrace.c struct pt_regs 
*regs = task_pt_regs(target); target 270 arch/mips/kernel/ptrace.c mips_syscall_update_nr(target, regs); target 279 arch/mips/kernel/ptrace.c static int gpr64_get(struct task_struct *target, target 284 arch/mips/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 292 arch/mips/kernel/ptrace.c static int gpr64_set(struct task_struct *target, target 297 arch/mips/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 333 arch/mips/kernel/ptrace.c mips_syscall_update_nr(target, regs); target 412 arch/mips/kernel/ptrace.c static int fpr_get_fpa(struct task_struct *target, target 417 arch/mips/kernel/ptrace.c &target->thread.fpu, target 427 arch/mips/kernel/ptrace.c static int fpr_get_msa(struct task_struct *target, target 437 arch/mips/kernel/ptrace.c fpr_val = get_fpr64(&target->thread.fpu.fpr[i], 0); target 453 arch/mips/kernel/ptrace.c static int fpr_get(struct task_struct *target, target 462 arch/mips/kernel/ptrace.c if (sizeof(target->thread.fpu.fpr[0]) == sizeof(elf_fpreg_t)) target 463 arch/mips/kernel/ptrace.c err = fpr_get_fpa(target, &pos, &count, &kbuf, &ubuf); target 465 arch/mips/kernel/ptrace.c err = fpr_get_msa(target, &pos, &count, &kbuf, &ubuf); target 470 arch/mips/kernel/ptrace.c &target->thread.fpu.fcr31, target 487 arch/mips/kernel/ptrace.c static int fpr_set_fpa(struct task_struct *target, target 492 arch/mips/kernel/ptrace.c &target->thread.fpu, target 502 arch/mips/kernel/ptrace.c static int fpr_set_msa(struct task_struct *target, target 517 arch/mips/kernel/ptrace.c set_fpr64(&target->thread.fpu.fpr[i], 0, fpr_val); target 535 arch/mips/kernel/ptrace.c static int fpr_set(struct task_struct *target, target 550 arch/mips/kernel/ptrace.c init_fp_ctx(target); target 552 arch/mips/kernel/ptrace.c if (sizeof(target->thread.fpu.fpr[0]) == sizeof(elf_fpreg_t)) target 553 arch/mips/kernel/ptrace.c err = fpr_set_fpa(target, &pos, &count, &kbuf, &ubuf); target 555 arch/mips/kernel/ptrace.c err = fpr_set_msa(target, &pos, &count, &kbuf, &ubuf); target 566 arch/mips/kernel/ptrace.c ptrace_setfcr31(target, fcr31); target 578 arch/mips/kernel/ptrace.c static int fp_mode_get(struct task_struct *target, target 585 arch/mips/kernel/ptrace.c fp_mode = mips_get_process_fp_mode(target); target 599 arch/mips/kernel/ptrace.c static int fp_mode_set(struct task_struct *target, target 618 arch/mips/kernel/ptrace.c err = mips_set_process_fp_mode(target, fp_mode); target 634 arch/mips/kernel/ptrace.c static int copy_pad_fprs(struct task_struct *target, target 651 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr[i], target 666 arch/mips/kernel/ptrace.c static int msa_get(struct task_struct *target, target 674 arch/mips/kernel/ptrace.c .fcsr = target->thread.fpu.fcr31, target 676 arch/mips/kernel/ptrace.c .msacsr = target->thread.fpu.msacsr, target 680 arch/mips/kernel/ptrace.c if (!tsk_used_math(target)) { target 682 arch/mips/kernel/ptrace.c err = copy_pad_fprs(target, regset, &pos, &count, target 684 arch/mips/kernel/ptrace.c } else if (!test_tsk_thread_flag(target, TIF_MSA_CTX_LIVE)) { target 686 arch/mips/kernel/ptrace.c err = copy_pad_fprs(target, regset, &pos, &count, target 688 arch/mips/kernel/ptrace.c } else if (sizeof(target->thread.fpu.fpr[0]) == regset->size) { target 691 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr, target 695 arch/mips/kernel/ptrace.c err = copy_pad_fprs(target, regset, &pos, &count, target 697 arch/mips/kernel/ptrace.c sizeof(target->thread.fpu.fpr[0])); target 706 arch/mips/kernel/ptrace.c static int msa_set(struct task_struct 
*target, target 716 arch/mips/kernel/ptrace.c init_fp_ctx(target); target 718 arch/mips/kernel/ptrace.c if (sizeof(target->thread.fpu.fpr[0]) == regset->size) { target 721 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr, target 726 arch/mips/kernel/ptrace.c sizeof(target->thread.fpu.fpr[0])); target 731 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr[i], target 740 arch/mips/kernel/ptrace.c target->thread.fpu.fcr31 = ctrl_regs.fcsr & ~FPU_CSR_ALL_X; target 741 arch/mips/kernel/ptrace.c target->thread.fpu.msacsr = ctrl_regs.msacsr & ~MSA_CSR_CAUSEF; target 754 arch/mips/kernel/ptrace.c static int dsp32_get(struct task_struct *target, target 776 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspr[i]; target 779 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspcontrol; target 789 arch/mips/kernel/ptrace.c static int dsp32_set(struct task_struct *target, target 817 arch/mips/kernel/ptrace.c target->thread.dsp.dspr[i] = (s32)dspregs[i]; target 820 arch/mips/kernel/ptrace.c target->thread.dsp.dspcontrol = (s32)dspregs[i]; target 834 arch/mips/kernel/ptrace.c static int dsp64_get(struct task_struct *target, target 856 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspr[i]; target 859 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspcontrol; target 869 arch/mips/kernel/ptrace.c static int dsp64_set(struct task_struct *target, target 897 arch/mips/kernel/ptrace.c target->thread.dsp.dspr[i] = dspregs[i]; target 900 arch/mips/kernel/ptrace.c target->thread.dsp.dspcontrol = dspregs[i]; target 912 arch/mips/kernel/ptrace.c static int dsp_active(struct task_struct *target, target 120 arch/mips/kernel/relocate.c unsigned long target = (insn & 0xffff) << 16; /* high 16bits of target */ target 122 arch/mips/kernel/relocate.c target += offset; target 124 arch/mips/kernel/relocate.c *loc_new = (insn & ~0xffff) | ((target >> 16) & 0xffff); target 148 arch/mips/kvm/emulate.c epc |= (insn.j_format.target << 2); target 515 arch/mips/math-emu/cp1emu.c *contpc |= (insn.j_format.target << 2); target 533 arch/mips/mm/uasm.c u32 *first, u32 *end, u32 *target) target 535 arch/mips/mm/uasm.c long off = (long)(target - first); target 537 arch/mips/mm/uasm.c memcpy(target, first, (end - first) * sizeof(u32)); target 106 arch/mips/net/ebpf_jit.c u32 *target; target 128 arch/mips/net/ebpf_jit.c if ((ctx)->target != NULL) { \ target 129 arch/mips/net/ebpf_jit.c u32 *p = &(ctx)->target[ctx->idx]; \ target 146 arch/mips/net/ebpf_jit.c if (!ctx->target) target 149 arch/mips/net/ebpf_jit.c base_va = (unsigned long)ctx->target; target 664 arch/mips/net/ebpf_jit.c unsigned int target = 0; target 1106 arch/mips/net/ebpf_jit.c target = j_target(ctx, exit_idx); target 1107 arch/mips/net/ebpf_jit.c if (target == (unsigned int)-1) target 1123 arch/mips/net/ebpf_jit.c emit_instr(ctx, j, target); target 1130 arch/mips/net/ebpf_jit.c target = j_target(ctx, this_idx + insn->off + 1); target 1131 arch/mips/net/ebpf_jit.c if (target == (unsigned int)-1) target 1147 arch/mips/net/ebpf_jit.c emit_instr(ctx, j, target); target 1289 arch/mips/net/ebpf_jit.c target = j_target(ctx, this_idx + insn->off + 1); target 1290 arch/mips/net/ebpf_jit.c if (target == (unsigned int)-1) target 1292 arch/mips/net/ebpf_jit.c emit_instr(ctx, j, target); target 1532 arch/mips/net/ebpf_jit.c if (ctx->target == NULL) target 1541 arch/mips/net/ebpf_jit.c if (ctx->target == NULL) target 1549 arch/mips/net/ebpf_jit.c if (ctx->target == NULL) target 1893 arch/mips/net/ebpf_jit.c ctx.target = (u32 *)image_ptr; target 
1905 arch/mips/net/ebpf_jit.c flush_icache_range((unsigned long)ctx.target, target 1906 arch/mips/net/ebpf_jit.c (unsigned long)&ctx.target[ctx.idx]); target 1910 arch/mips/net/ebpf_jit.c bpf_jit_dump(prog->len, image_size, 2, ctx.target); target 1913 arch/mips/net/ebpf_jit.c prog->bpf_func = (void *)ctx.target; target 101 arch/mips/pci/pci-vr41xx.c struct pci_target_address_conversion *target; target 178 arch/mips/pci/pci-vr41xx.c target = setup->target_memory1; target 179 arch/mips/pci/pci-vr41xx.c val = TARGET_MSK(target->address_mask) | target 181 arch/mips/pci/pci-vr41xx.c ITA(target->bus_base_address); target 190 arch/mips/pci/pci-vr41xx.c target = setup->target_memory2; target 191 arch/mips/pci/pci-vr41xx.c val = TARGET_MSK(target->address_mask) | target 193 arch/mips/pci/pci-vr41xx.c ITA(target->bus_base_address); target 14 arch/nds32/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 19 arch/nds32/kernel/ptrace.c struct user_pt_regs *uregs = &task_pt_regs(target)->user_regs; target 23 arch/nds32/kernel/ptrace.c static int gpr_set(struct task_struct *target, const struct user_regset *regset, target 28 arch/nds32/kernel/ptrace.c struct user_pt_regs newregs = task_pt_regs(target)->user_regs; target 34 arch/nds32/kernel/ptrace.c task_pt_regs(target)->user_regs = newregs; target 22 arch/nios2/kernel/ptrace.c static int genregs_get(struct task_struct *target, target 27 arch/nios2/kernel/ptrace.c const struct pt_regs *regs = task_pt_regs(target); target 68 arch/nios2/kernel/ptrace.c static int genregs_set(struct task_struct *target, target 73 arch/nios2/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 46 arch/openrisc/kernel/ptrace.c static int genregs_get(struct task_struct *target, target 51 arch/openrisc/kernel/ptrace.c const struct pt_regs *regs = task_pt_regs(target); target 76 arch/openrisc/kernel/ptrace.c static int genregs_set(struct task_struct *target, target 81 arch/openrisc/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 28 arch/parisc/kernel/jump_label.c void *target = (void *)jump_entry_target(entry); target 29 arch/parisc/kernel/jump_label.c int distance = target - addr; target 393 arch/parisc/kernel/ptrace.c static int fpr_get(struct task_struct *target, target 398 arch/parisc/kernel/ptrace.c struct pt_regs *regs = task_regs(target); target 422 arch/parisc/kernel/ptrace.c static int fpr_set(struct task_struct *target, target 427 arch/parisc/kernel/ptrace.c struct pt_regs *regs = task_regs(target); target 529 arch/parisc/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 534 arch/parisc/kernel/ptrace.c struct pt_regs *regs = task_regs(target); target 557 arch/parisc/kernel/ptrace.c static int gpr_set(struct task_struct *target, target 562 arch/parisc/kernel/ptrace.c struct pt_regs *regs = task_regs(target); target 609 arch/parisc/kernel/ptrace.c static int gpr32_get(struct task_struct *target, target 614 arch/parisc/kernel/ptrace.c struct pt_regs *regs = task_regs(target); target 638 arch/parisc/kernel/ptrace.c static int gpr32_set(struct task_struct *target, target 643 arch/parisc/kernel/ptrace.c struct pt_regs *regs = task_regs(target); target 94 arch/parisc/math-emu/decode_exc.c int target, exception_index = 1; target 210 arch/parisc/math-emu/decode_exc.c target = current_ir & fivebits; target 223 arch/parisc/math-emu/decode_exc.c Sgl_decrement(Fpu_sgl(target)); target 226 arch/parisc/math-emu/decode_exc.c sgl_denormalize(&Fpu_sgl(target),&inexact,Rounding_mode()); target 235 
arch/parisc/math-emu/decode_exc.c Dbl_decrement(Fpu_dblp1(target),Fpu_dblp2(target)); target 238 arch/parisc/math-emu/decode_exc.c dbl_denormalize(&Fpu_dblp1(target),&Fpu_dblp2(target), target 288 arch/parisc/math-emu/decode_exc.c target = current_ir & fivebits; target 291 arch/parisc/math-emu/decode_exc.c Sgl_setoverflow(Fpu_sgl(target)); target 294 arch/parisc/math-emu/decode_exc.c Dbl_setoverflow(Fpu_dblp1(target),Fpu_dblp2(target)); target 26 arch/powerpc/include/asm/code-patching.h unsigned long target, int flags); target 28 arch/powerpc/include/asm/code-patching.h unsigned long target, int flags); target 29 arch/powerpc/include/asm/code-patching.h int patch_branch(unsigned int *addr, unsigned long target, int flags); target 43 arch/powerpc/include/asm/code-patching.h static inline int patch_branch_site(s32 *site, unsigned long target, int flags) target 45 arch/powerpc/include/asm/code-patching.h return patch_branch((unsigned int *)patch_site_addr(site), target, flags); target 607 arch/powerpc/include/asm/cpm1.h int cpm1_clk_setup(enum cpm_clk_target target, int clock, int mode); target 1136 arch/powerpc/include/asm/cpm2.h extern int cpm2_clk_setup(enum cpm_clk_target target, int clock, int mode); target 1137 arch/powerpc/include/asm/cpm2.h extern int cpm2_smc_clk_setup(enum cpm_clk_target target, int clock); target 343 arch/powerpc/include/asm/fsl_hcalls.h uint64_t target; /**< guest physical address to copy to */ target 358 arch/powerpc/include/asm/fsl_hcalls.h unsigned int target, phys_addr_t sg_list, unsigned int count) target 369 arch/powerpc/include/asm/fsl_hcalls.h r4 = target; target 134 arch/powerpc/include/asm/icswx.h struct data_descriptor_entry target; target 54 arch/powerpc/include/asm/jump_label.h jump_label_t target; target 761 arch/powerpc/include/asm/kvm_ppc.h long int kvmppc_rm_h_confer(struct kvm_vcpu *vcpu, int target, target 96 arch/powerpc/include/asm/module.h unsigned long *target); target 478 arch/powerpc/include/asm/mpic.h void smp_mpic_message_pass(int target, int msg); target 231 arch/powerpc/include/asm/pmac_pfunc.h extern int pmf_call_function(struct device_node *target, const char *name, target 240 arch/powerpc/include/asm/pmac_pfunc.h extern struct pmf_function *pmf_find_function(struct device_node *target, target 48 arch/powerpc/include/asm/xive.h int target; target 113 arch/powerpc/include/asm/xive.h extern int xive_native_configure_irq(u32 hw_irq, u32 target, u8 prio, u32 sw_irq); target 16 arch/powerpc/kernel/jump_label.c patch_branch(addr, entry->target, 0); target 149 arch/powerpc/kernel/module_64.c unsigned long *target) target 177 arch/powerpc/kernel/module_64.c *target = stub_func_addr(funcdata); target 337 arch/powerpc/kernel/ptrace.c static int gpr_get(struct task_struct *target, const struct user_regset *regset, target 343 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) target 346 arch/powerpc/kernel/ptrace.c if (!FULL_REGS(target->thread.regs)) { target 349 arch/powerpc/kernel/ptrace.c target->thread.regs->gpr[i] = NV_REG_POISON; target 353 arch/powerpc/kernel/ptrace.c target->thread.regs, target 356 arch/powerpc/kernel/ptrace.c unsigned long msr = get_user_msr(target); target 368 arch/powerpc/kernel/ptrace.c &target->thread.regs->orig_gpr3, target 378 arch/powerpc/kernel/ptrace.c static int gpr_set(struct task_struct *target, const struct user_regset *regset, target 385 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) target 388 arch/powerpc/kernel/ptrace.c CHECK_FULL_REGS(target->thread.regs); target 391 
arch/powerpc/kernel/ptrace.c target->thread.regs, target 399 arch/powerpc/kernel/ptrace.c ret = set_user_msr(target, reg); target 407 arch/powerpc/kernel/ptrace.c &target->thread.regs->orig_gpr3, target 422 arch/powerpc/kernel/ptrace.c ret = set_user_trap(target, reg); target 445 arch/powerpc/kernel/ptrace.c static int fpr_get(struct task_struct *target, const struct user_regset *regset, target 453 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 457 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_FPR(i); target 458 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.fp_state.fpscr; target 464 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 467 arch/powerpc/kernel/ptrace.c &target->thread.fp_state, 0, -1); target 484 arch/powerpc/kernel/ptrace.c static int fpr_set(struct task_struct *target, const struct user_regset *regset, target 492 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 495 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_FPR(i); target 496 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.fp_state.fpscr; target 504 arch/powerpc/kernel/ptrace.c target->thread.TS_FPR(i) = buf[i]; target 505 arch/powerpc/kernel/ptrace.c target->thread.fp_state.fpscr = buf[32]; target 511 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 514 arch/powerpc/kernel/ptrace.c &target->thread.fp_state, 0, -1); target 532 arch/powerpc/kernel/ptrace.c static int vr_active(struct task_struct *target, target 535 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 536 arch/powerpc/kernel/ptrace.c return target->thread.used_vr ? regset->n : 0; target 553 arch/powerpc/kernel/ptrace.c static int vr_get(struct task_struct *target, const struct user_regset *regset, target 559 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 565 arch/powerpc/kernel/ptrace.c &target->thread.vr_state, 0, target 578 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.vrsave; target 603 arch/powerpc/kernel/ptrace.c static int vr_set(struct task_struct *target, const struct user_regset *regset, target 609 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 615 arch/powerpc/kernel/ptrace.c &target->thread.vr_state, 0, target 628 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.vrsave; target 635 arch/powerpc/kernel/ptrace.c target->thread.vrsave = vrsave.word; target 649 arch/powerpc/kernel/ptrace.c static int vsr_active(struct task_struct *target, target 652 arch/powerpc/kernel/ptrace.c flush_vsx_to_thread(target); target 653 arch/powerpc/kernel/ptrace.c return target->thread.used_vsr ? 
regset->n : 0; target 668 arch/powerpc/kernel/ptrace.c static int vsr_get(struct task_struct *target, const struct user_regset *regset, target 675 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 676 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 677 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 678 arch/powerpc/kernel/ptrace.c flush_vsx_to_thread(target); target 681 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.fp_state.fpr[i][TS_VSRLOWOFFSET]; target 701 arch/powerpc/kernel/ptrace.c static int vsr_set(struct task_struct *target, const struct user_regset *regset, target 708 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 709 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 710 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 711 arch/powerpc/kernel/ptrace.c flush_vsx_to_thread(target); target 714 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.fp_state.fpr[i][TS_VSRLOWOFFSET]; target 720 arch/powerpc/kernel/ptrace.c target->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; target 738 arch/powerpc/kernel/ptrace.c static int evr_active(struct task_struct *target, target 741 arch/powerpc/kernel/ptrace.c flush_spe_to_thread(target); target 742 arch/powerpc/kernel/ptrace.c return target->thread.used_spe ? regset->n : 0; target 745 arch/powerpc/kernel/ptrace.c static int evr_get(struct task_struct *target, const struct user_regset *regset, target 751 arch/powerpc/kernel/ptrace.c flush_spe_to_thread(target); target 754 arch/powerpc/kernel/ptrace.c &target->thread.evr, target 755 arch/powerpc/kernel/ptrace.c 0, sizeof(target->thread.evr)); target 762 arch/powerpc/kernel/ptrace.c &target->thread.acc, target 763 arch/powerpc/kernel/ptrace.c sizeof(target->thread.evr), -1); target 768 arch/powerpc/kernel/ptrace.c static int evr_set(struct task_struct *target, const struct user_regset *regset, target 774 arch/powerpc/kernel/ptrace.c flush_spe_to_thread(target); target 777 arch/powerpc/kernel/ptrace.c &target->thread.evr, target 778 arch/powerpc/kernel/ptrace.c 0, sizeof(target->thread.evr)); target 785 arch/powerpc/kernel/ptrace.c &target->thread.acc, target 786 arch/powerpc/kernel/ptrace.c sizeof(target->thread.evr), -1); target 801 arch/powerpc/kernel/ptrace.c static int tm_cgpr_active(struct task_struct *target, target 807 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 833 arch/powerpc/kernel/ptrace.c static int tm_cgpr_get(struct task_struct *target, target 843 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 846 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 847 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 848 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 851 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs, target 854 arch/powerpc/kernel/ptrace.c unsigned long msr = get_user_ckpt_msr(target); target 867 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.orig_gpr3, target 897 arch/powerpc/kernel/ptrace.c static int tm_cgpr_set(struct task_struct *target, target 908 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 911 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 912 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 913 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 916 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs, target 924 
arch/powerpc/kernel/ptrace.c ret = set_user_ckpt_msr(target, reg); target 932 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.orig_gpr3, target 947 arch/powerpc/kernel/ptrace.c ret = set_user_ckpt_trap(target, reg); target 966 arch/powerpc/kernel/ptrace.c static int tm_cfpr_active(struct task_struct *target, target 972 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 999 arch/powerpc/kernel/ptrace.c static int tm_cfpr_get(struct task_struct *target, target 1010 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1013 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1014 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1015 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1019 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_CKFPR(i); target 1020 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.ckfp_state.fpscr; target 1045 arch/powerpc/kernel/ptrace.c static int tm_cfpr_set(struct task_struct *target, target 1056 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1059 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1060 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1061 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1064 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_CKFPR(i); target 1065 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.ckfp_state.fpscr; target 1072 arch/powerpc/kernel/ptrace.c target->thread.TS_CKFPR(i) = buf[i]; target 1073 arch/powerpc/kernel/ptrace.c target->thread.ckfp_state.fpscr = buf[32]; target 1085 arch/powerpc/kernel/ptrace.c static int tm_cvmx_active(struct task_struct *target, target 1091 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1119 arch/powerpc/kernel/ptrace.c static int tm_cvmx_get(struct task_struct *target, target 1131 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1135 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1136 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1137 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1140 arch/powerpc/kernel/ptrace.c &target->thread.ckvr_state, 0, target 1151 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.ckvrsave; target 1181 arch/powerpc/kernel/ptrace.c static int tm_cvmx_set(struct task_struct *target, target 1193 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1196 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1197 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1198 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1201 arch/powerpc/kernel/ptrace.c &target->thread.ckvr_state, 0, target 1212 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.ckvrsave; target 1216 arch/powerpc/kernel/ptrace.c target->thread.ckvrsave = vrsave.word; target 1230 arch/powerpc/kernel/ptrace.c static int tm_cvsx_active(struct task_struct *target, target 1236 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1239 arch/powerpc/kernel/ptrace.c flush_vsx_to_thread(target); target 1240 arch/powerpc/kernel/ptrace.c return target->thread.used_vsr ? 
regset->n : 0; target 1263 arch/powerpc/kernel/ptrace.c static int tm_cvsx_get(struct task_struct *target, target 1274 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1278 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1279 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1280 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1281 arch/powerpc/kernel/ptrace.c flush_vsx_to_thread(target); target 1284 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET]; target 1311 arch/powerpc/kernel/ptrace.c static int tm_cvsx_set(struct task_struct *target, target 1322 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1326 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1327 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1328 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1329 arch/powerpc/kernel/ptrace.c flush_vsx_to_thread(target); target 1332 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET]; target 1338 arch/powerpc/kernel/ptrace.c target->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; target 1351 arch/powerpc/kernel/ptrace.c static int tm_spr_active(struct task_struct *target, target 1378 arch/powerpc/kernel/ptrace.c static int tm_spr_get(struct task_struct *target, target 1394 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1395 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1396 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1400 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfhar, 0, sizeof(u64)); target 1405 arch/powerpc/kernel/ptrace.c &target->thread.tm_texasr, sizeof(u64), target 1411 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfiar, target 1434 arch/powerpc/kernel/ptrace.c static int tm_spr_set(struct task_struct *target, target 1450 arch/powerpc/kernel/ptrace.c flush_tmregs_to_thread(target); target 1451 arch/powerpc/kernel/ptrace.c flush_fp_to_thread(target); target 1452 arch/powerpc/kernel/ptrace.c flush_altivec_to_thread(target); target 1456 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfhar, 0, sizeof(u64)); target 1461 arch/powerpc/kernel/ptrace.c &target->thread.tm_texasr, sizeof(u64), target 1467 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfiar, target 1472 arch/powerpc/kernel/ptrace.c static int tm_tar_active(struct task_struct *target, target 1478 arch/powerpc/kernel/ptrace.c if (MSR_TM_ACTIVE(target->thread.regs->msr)) target 1484 arch/powerpc/kernel/ptrace.c static int tm_tar_get(struct task_struct *target, target 1494 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1498 arch/powerpc/kernel/ptrace.c &target->thread.tm_tar, 0, sizeof(u64)); target 1502 arch/powerpc/kernel/ptrace.c static int tm_tar_set(struct task_struct *target, target 1512 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1516 arch/powerpc/kernel/ptrace.c &target->thread.tm_tar, 0, sizeof(u64)); target 1520 arch/powerpc/kernel/ptrace.c static int tm_ppr_active(struct task_struct *target, target 1526 arch/powerpc/kernel/ptrace.c if (MSR_TM_ACTIVE(target->thread.regs->msr)) target 1533 arch/powerpc/kernel/ptrace.c static int tm_ppr_get(struct task_struct *target, target 1543 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1547 arch/powerpc/kernel/ptrace.c &target->thread.tm_ppr, 0, 
sizeof(u64)); target 1551 arch/powerpc/kernel/ptrace.c static int tm_ppr_set(struct task_struct *target, target 1561 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1565 arch/powerpc/kernel/ptrace.c &target->thread.tm_ppr, 0, sizeof(u64)); target 1569 arch/powerpc/kernel/ptrace.c static int tm_dscr_active(struct task_struct *target, target 1575 arch/powerpc/kernel/ptrace.c if (MSR_TM_ACTIVE(target->thread.regs->msr)) target 1581 arch/powerpc/kernel/ptrace.c static int tm_dscr_get(struct task_struct *target, target 1591 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1595 arch/powerpc/kernel/ptrace.c &target->thread.tm_dscr, 0, sizeof(u64)); target 1599 arch/powerpc/kernel/ptrace.c static int tm_dscr_set(struct task_struct *target, target 1609 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) target 1613 arch/powerpc/kernel/ptrace.c &target->thread.tm_dscr, 0, sizeof(u64)); target 1619 arch/powerpc/kernel/ptrace.c static int ppr_get(struct task_struct *target, target 1625 arch/powerpc/kernel/ptrace.c &target->thread.regs->ppr, 0, sizeof(u64)); target 1628 arch/powerpc/kernel/ptrace.c static int ppr_set(struct task_struct *target, target 1634 arch/powerpc/kernel/ptrace.c &target->thread.regs->ppr, 0, sizeof(u64)); target 1637 arch/powerpc/kernel/ptrace.c static int dscr_get(struct task_struct *target, target 1643 arch/powerpc/kernel/ptrace.c &target->thread.dscr, 0, sizeof(u64)); target 1645 arch/powerpc/kernel/ptrace.c static int dscr_set(struct task_struct *target, target 1651 arch/powerpc/kernel/ptrace.c &target->thread.dscr, 0, sizeof(u64)); target 1655 arch/powerpc/kernel/ptrace.c static int tar_get(struct task_struct *target, target 1661 arch/powerpc/kernel/ptrace.c &target->thread.tar, 0, sizeof(u64)); target 1663 arch/powerpc/kernel/ptrace.c static int tar_set(struct task_struct *target, target 1669 arch/powerpc/kernel/ptrace.c &target->thread.tar, 0, sizeof(u64)); target 1672 arch/powerpc/kernel/ptrace.c static int ebb_active(struct task_struct *target, target 1678 arch/powerpc/kernel/ptrace.c if (target->thread.used_ebb) target 1684 arch/powerpc/kernel/ptrace.c static int ebb_get(struct task_struct *target, target 1696 arch/powerpc/kernel/ptrace.c if (!target->thread.used_ebb) target 1700 arch/powerpc/kernel/ptrace.c &target->thread.ebbrr, 0, 3 * sizeof(unsigned long)); target 1703 arch/powerpc/kernel/ptrace.c static int ebb_set(struct task_struct *target, target 1717 arch/powerpc/kernel/ptrace.c if (target->thread.used_ebb) target 1721 arch/powerpc/kernel/ptrace.c &target->thread.ebbrr, 0, sizeof(unsigned long)); target 1725 arch/powerpc/kernel/ptrace.c &target->thread.ebbhr, sizeof(unsigned long), target 1730 arch/powerpc/kernel/ptrace.c &target->thread.bescr, target 1735 arch/powerpc/kernel/ptrace.c static int pmu_active(struct task_struct *target, target 1744 arch/powerpc/kernel/ptrace.c static int pmu_get(struct task_struct *target, target 1759 arch/powerpc/kernel/ptrace.c &target->thread.siar, 0, target 1763 arch/powerpc/kernel/ptrace.c static int pmu_set(struct task_struct *target, target 1780 arch/powerpc/kernel/ptrace.c &target->thread.siar, 0, target 1785 arch/powerpc/kernel/ptrace.c &target->thread.sdar, sizeof(unsigned long), target 1790 arch/powerpc/kernel/ptrace.c &target->thread.sier, 2 * sizeof(unsigned long), target 1795 arch/powerpc/kernel/ptrace.c &target->thread.mmcr2, 3 * sizeof(unsigned long), target 1800 arch/powerpc/kernel/ptrace.c &target->thread.mmcr0, 4 * 
sizeof(unsigned long), target 1807 arch/powerpc/kernel/ptrace.c static int pkey_active(struct task_struct *target, target 1816 arch/powerpc/kernel/ptrace.c static int pkey_get(struct task_struct *target, target 1828 arch/powerpc/kernel/ptrace.c &target->thread.amr, 0, target 1832 arch/powerpc/kernel/ptrace.c static int pkey_set(struct task_struct *target, target 1853 arch/powerpc/kernel/ptrace.c target->thread.amr = (new_amr & target->thread.uamor) | target 1854 arch/powerpc/kernel/ptrace.c (target->thread.amr & ~target->thread.uamor); target 2019 arch/powerpc/kernel/ptrace.c static int gpr32_get_common(struct task_struct *target, target 2041 arch/powerpc/kernel/ptrace.c reg = get_user_msr(target); target 2066 arch/powerpc/kernel/ptrace.c static int gpr32_set_common(struct task_struct *target, target 2095 arch/powerpc/kernel/ptrace.c set_user_msr(target, reg); target 2121 arch/powerpc/kernel/ptrace.c set_user_trap(target, reg); target 2135 arch/powerpc/kernel/ptrace.c static int tm_cgpr32_get(struct task_struct *target, target 2140 arch/powerpc/kernel/ptrace.c return gpr32_get_common(target, regset, pos, count, kbuf, ubuf, target 2141 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.gpr[0]); target 2144 arch/powerpc/kernel/ptrace.c static int tm_cgpr32_set(struct task_struct *target, target 2149 arch/powerpc/kernel/ptrace.c return gpr32_set_common(target, regset, pos, count, kbuf, ubuf, target 2150 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.gpr[0]); target 2154 arch/powerpc/kernel/ptrace.c static int gpr32_get(struct task_struct *target, target 2161 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) target 2164 arch/powerpc/kernel/ptrace.c if (!FULL_REGS(target->thread.regs)) { target 2170 arch/powerpc/kernel/ptrace.c target->thread.regs->gpr[i] = NV_REG_POISON; target 2172 arch/powerpc/kernel/ptrace.c return gpr32_get_common(target, regset, pos, count, kbuf, ubuf, target 2173 arch/powerpc/kernel/ptrace.c &target->thread.regs->gpr[0]); target 2176 arch/powerpc/kernel/ptrace.c static int gpr32_set(struct task_struct *target, target 2181 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) target 2184 arch/powerpc/kernel/ptrace.c CHECK_FULL_REGS(target->thread.regs); target 2185 arch/powerpc/kernel/ptrace.c return gpr32_set_common(target, regset, pos, count, kbuf, ubuf, target 2186 arch/powerpc/kernel/ptrace.c &target->thread.regs->gpr[0]); target 874 arch/powerpc/kvm/book3s_hv.c static int kvm_arch_vcpu_yield_to(struct kvm_vcpu *target) target 876 arch/powerpc/kvm/book3s_hv.c struct kvmppc_vcore *vcore = target->arch.vcore; target 887 arch/powerpc/kvm/book3s_hv.c if (target->arch.state == KVMPPC_VCPU_RUNNABLE && target 890 arch/powerpc/kvm/book3s_hv.c target = vcore->runner; target 893 arch/powerpc/kvm/book3s_hv.c return kvm_vcpu_yield_to(target); target 912 arch/powerpc/kvm/book3s_hv.c unsigned long target, ret = H_SUCCESS; target 925 arch/powerpc/kvm/book3s_hv.c target = kvmppc_get_gpr(vcpu, 4); target 926 arch/powerpc/kvm/book3s_hv.c tvcpu = kvmppc_find_vcpu(vcpu->kvm, target); target 937 arch/powerpc/kvm/book3s_hv.c target = kvmppc_get_gpr(vcpu, 4); target 938 arch/powerpc/kvm/book3s_hv.c if (target == -1) target 940 arch/powerpc/kvm/book3s_hv.c tvcpu = kvmppc_find_vcpu(vcpu->kvm, target); target 132 arch/powerpc/kvm/book3s_hv_builtin.c long int kvmppc_rm_h_confer(struct kvm_vcpu *vcpu, int target, target 197 arch/powerpc/lib/code-patching.c int patch_branch(unsigned int *addr, unsigned long target, int flags) target 199 
arch/powerpc/lib/code-patching.c return patch_instruction(addr, create_branch(addr, target, flags)); target 247 arch/powerpc/lib/code-patching.c unsigned long target, int flags) target 252 arch/powerpc/lib/code-patching.c offset = target; target 267 arch/powerpc/lib/code-patching.c unsigned long target, int flags) target 272 arch/powerpc/lib/code-patching.c offset = target; target 366 arch/powerpc/lib/code-patching.c unsigned long target; target 368 arch/powerpc/lib/code-patching.c target = branch_target(src); target 371 arch/powerpc/lib/code-patching.c return create_branch(dest, target, *src); target 373 arch/powerpc/lib/code-patching.c return create_cond_branch(dest, target, *src); target 52 arch/powerpc/lib/feature-fixups.c unsigned int *target = (unsigned int *)branch_target(src); target 55 arch/powerpc/lib/feature-fixups.c if (target < alt_start || target > alt_end) { target 99 arch/powerpc/perf/8xx-pmu.c unsigned long target = patch_site_addr(&patch__itlbmiss_perf); target 101 arch/powerpc/perf/8xx-pmu.c patch_branch_site(&patch__itlbmiss_exit_1, target, 0); target 103 arch/powerpc/perf/8xx-pmu.c patch_branch_site(&patch__itlbmiss_exit_2, target, 0); target 110 arch/powerpc/perf/8xx-pmu.c unsigned long target = patch_site_addr(&patch__dtlbmiss_perf); target 112 arch/powerpc/perf/8xx-pmu.c patch_branch_site(&patch__dtlbmiss_exit_1, target, 0); target 113 arch/powerpc/perf/8xx-pmu.c patch_branch_site(&patch__dtlbmiss_exit_2, target, 0); target 114 arch/powerpc/perf/8xx-pmu.c patch_branch_site(&patch__dtlbmiss_exit_3, target, 0); target 420 arch/powerpc/perf/core-book3s.c __u64 target; target 438 arch/powerpc/perf/core-book3s.c target = branch_target(&instr); target 439 arch/powerpc/perf/core-book3s.c if ((!target) || (instr & BRANCH_ABSOLUTE)) target 440 arch/powerpc/perf/core-book3s.c return target; target 443 arch/powerpc/perf/core-book3s.c return target - (unsigned long)&instr + addr; target 335 arch/powerpc/perf/imc-pmu.c int nid, target = -1; target 365 arch/powerpc/perf/imc-pmu.c target = cpumask_last(l_cpumask); target 371 arch/powerpc/perf/imc-pmu.c if (unlikely(target == cpu)) target 372 arch/powerpc/perf/imc-pmu.c target = cpumask_any_but(l_cpumask, cpu); target 378 arch/powerpc/perf/imc-pmu.c if (target >= 0 && target < nr_cpu_ids) { target 379 arch/powerpc/perf/imc-pmu.c cpumask_set_cpu(target, &nest_imc_cpumask); target 380 arch/powerpc/perf/imc-pmu.c nest_change_cpu_context(cpu, target); target 895 arch/powerpc/perf/imc-pmu.c struct task_struct *target; target 915 arch/powerpc/perf/imc-pmu.c target = event->hw.target; target 916 arch/powerpc/perf/imc-pmu.c if (!target) target 1305 arch/powerpc/perf/imc-pmu.c struct task_struct *target; target 1318 arch/powerpc/perf/imc-pmu.c target = event->hw.target; target 59 arch/powerpc/platforms/86xx/mpc86xx_smp.c unsigned long target, flags; target 74 arch/powerpc/platforms/86xx/mpc86xx_smp.c target = (unsigned long) __secondary_start_mpc86xx; target 75 arch/powerpc/platforms/86xx/mpc86xx_smp.c patch_branch(vector, target, BRANCH_SET_LINK); target 397 arch/powerpc/platforms/8xx/cpm1.c int cpm1_clk_setup(enum cpm_clk_target target, int clock, int mode) target 460 arch/powerpc/platforms/8xx/cpm1.c switch (target) { target 497 arch/powerpc/platforms/8xx/cpm1.c if (clk_map[i][0] == target && clk_map[i][1] == clock) { target 423 arch/powerpc/platforms/cell/spu_manage.c neighbour_spu(int cbe, struct device_node *target, struct device_node *avoid) target 436 arch/powerpc/platforms/cell/spu_manage.c if (vic_handles[i] == target->phandle) 
target 65 arch/powerpc/platforms/cell/spu_priv1_mmio.c u64 target; target 76 arch/powerpc/platforms/cell/spu_priv1_mmio.c target = iic_get_target_id(cpu); target 77 arch/powerpc/platforms/cell/spu_priv1_mmio.c route = target << 48 | target << 32 | target << 16; target 808 arch/powerpc/platforms/powermac/pfunc_core.c static struct pmf_function *__pmf_find_function(struct device_node *target, target 811 arch/powerpc/platforms/powermac/pfunc_core.c struct device_node *actor = of_node_get(target); target 823 arch/powerpc/platforms/powermac/pfunc_core.c prop = of_get_property(target, fname, NULL); target 848 arch/powerpc/platforms/powermac/pfunc_core.c if (func->phandle && target->phandle != func->phandle) target 862 arch/powerpc/platforms/powermac/pfunc_core.c int pmf_register_irq_client(struct device_node *target, target 870 arch/powerpc/platforms/powermac/pfunc_core.c func = __pmf_find_function(target, name, PMF_FLAGS_INT_GEN); target 993 arch/powerpc/platforms/powermac/pfunc_core.c struct pmf_function *pmf_find_function(struct device_node *target, target 1000 arch/powerpc/platforms/powermac/pfunc_core.c func = __pmf_find_function(target, name, PMF_FLAGS_ON_DEMAND); target 1008 arch/powerpc/platforms/powermac/pfunc_core.c int pmf_call_function(struct device_node *target, const char *name, target 1011 arch/powerpc/platforms/powermac/pfunc_core.c struct pmf_function *func = pmf_find_function(target, name); target 800 arch/powerpc/platforms/powermac/smp.c unsigned long target, flags; target 817 arch/powerpc/platforms/powermac/smp.c target = (unsigned long) __secondary_start_pmac_0 + nr * 8; target 818 arch/powerpc/platforms/powermac/smp.c patch_branch(vector, target, BRANCH_SET_LINK); target 279 arch/powerpc/platforms/pseries/cmm.c signed long active_pages_target, page_loan_request, target; target 289 arch/powerpc/platforms/pseries/cmm.c target = page_loan_request + (signed long)loaned_pages; target 291 arch/powerpc/platforms/pseries/cmm.c if (target < 0 || total_pages < min_mem_pages) target 292 arch/powerpc/platforms/pseries/cmm.c target = 0; target 294 arch/powerpc/platforms/pseries/cmm.c if (target > oom_freed_pages) target 295 arch/powerpc/platforms/pseries/cmm.c target -= oom_freed_pages; target 297 arch/powerpc/platforms/pseries/cmm.c target = 0; target 299 arch/powerpc/platforms/pseries/cmm.c active_pages_target = total_pages - target; target 302 arch/powerpc/platforms/pseries/cmm.c target = total_pages - min_mem_pages; target 304 arch/powerpc/platforms/pseries/cmm.c if (target < 0) target 305 arch/powerpc/platforms/pseries/cmm.c target = 0; target 307 arch/powerpc/platforms/pseries/cmm.c loaned_pages_target = target; target 139 arch/powerpc/sysdev/cpm2.c int cpm2_clk_setup(enum cpm_clk_target target, int clock, int mode) target 209 arch/powerpc/sysdev/cpm2.c switch (target) { target 244 arch/powerpc/sysdev/cpm2.c if (clk_map[i][0] == target && clk_map[i][1] == clock) { target 269 arch/powerpc/sysdev/cpm2.c int cpm2_smc_clk_setup(enum cpm_clk_target target, int clock) target 291 arch/powerpc/sysdev/cpm2.c switch (target) { target 308 arch/powerpc/sysdev/cpm2.c if (clk_map[i][0] == target && clk_map[i][1] == clock) { target 264 arch/powerpc/sysdev/xive/common.c u32 target; target 271 arch/powerpc/sysdev/xive/common.c rc = xive_ops->get_irq_config(hw_irq, &target, &prio, &lirq); target 278 arch/powerpc/sysdev/xive/common.c hw_irq, target, prio, lirq); target 594 arch/powerpc/sysdev/xive/common.c int target, rc; target 612 arch/powerpc/sysdev/xive/common.c target = xive_pick_irq_target(d, 
irq_data_get_affinity_mask(d)); target 613 arch/powerpc/sysdev/xive/common.c if (target == XIVE_INVALID_TARGET) { target 615 arch/powerpc/sysdev/xive/common.c target = xive_pick_irq_target(d, cpu_online_mask); target 616 arch/powerpc/sysdev/xive/common.c if (target == XIVE_INVALID_TARGET) target 622 arch/powerpc/sysdev/xive/common.c if (WARN_ON(target == XIVE_INVALID_TARGET || target 623 arch/powerpc/sysdev/xive/common.c target >= nr_cpu_ids)) target 624 arch/powerpc/sysdev/xive/common.c target = smp_processor_id(); target 626 arch/powerpc/sysdev/xive/common.c xd->target = target; target 633 arch/powerpc/sysdev/xive/common.c get_hard_smp_processor_id(target), target 653 arch/powerpc/sysdev/xive/common.c if (WARN_ON(xd->target == XIVE_INVALID_TARGET)) target 664 arch/powerpc/sysdev/xive/common.c get_hard_smp_processor_id(xd->target), target 667 arch/powerpc/sysdev/xive/common.c xive_dec_target_count(xd->target); target 668 arch/powerpc/sysdev/xive/common.c xd->target = XIVE_INVALID_TARGET; target 686 arch/powerpc/sysdev/xive/common.c get_hard_smp_processor_id(xd->target), target 709 arch/powerpc/sysdev/xive/common.c get_hard_smp_processor_id(xd->target), target 723 arch/powerpc/sysdev/xive/common.c u32 target, old_target; target 740 arch/powerpc/sysdev/xive/common.c if (xd->target != XIVE_INVALID_TARGET && target 741 arch/powerpc/sysdev/xive/common.c cpu_online(xd->target) && target 742 arch/powerpc/sysdev/xive/common.c cpumask_test_cpu(xd->target, cpumask)) target 746 arch/powerpc/sysdev/xive/common.c target = xive_pick_irq_target(d, cpumask); target 749 arch/powerpc/sysdev/xive/common.c if (target == XIVE_INVALID_TARGET) target 753 arch/powerpc/sysdev/xive/common.c if (WARN_ON(target >= nr_cpu_ids)) target 754 arch/powerpc/sysdev/xive/common.c target = smp_processor_id(); target 756 arch/powerpc/sysdev/xive/common.c old_target = xd->target; target 764 arch/powerpc/sysdev/xive/common.c get_hard_smp_processor_id(target), target 771 arch/powerpc/sysdev/xive/common.c pr_devel(" target: 0x%x\n", target); target 772 arch/powerpc/sysdev/xive/common.c xd->target = target; target 878 arch/powerpc/sysdev/xive/common.c if (xd->target == XIVE_INVALID_TARGET) { target 922 arch/powerpc/sysdev/xive/common.c if (xd->target == XIVE_INVALID_TARGET) { target 944 arch/powerpc/sysdev/xive/common.c get_hard_smp_processor_id(xd->target), target 1046 arch/powerpc/sysdev/xive/common.c xd->target = XIVE_INVALID_TARGET; target 100 arch/powerpc/sysdev/xive/native.c int xive_native_configure_irq(u32 hw_irq, u32 target, u8 prio, u32 sw_irq) target 105 arch/powerpc/sysdev/xive/native.c rc = opal_xive_set_irq_config(hw_irq, target, prio, sw_irq); target 114 arch/powerpc/sysdev/xive/native.c static int xive_native_get_irq_config(u32 hw_irq, u32 *target, u8 *prio, target 123 arch/powerpc/sysdev/xive/native.c *target = be64_to_cpu(vp); target 193 arch/powerpc/sysdev/xive/spapr.c unsigned long target, target 201 arch/powerpc/sysdev/xive/spapr.c flags, lisn, target, prio, sw_irq); target 206 arch/powerpc/sysdev/xive/spapr.c target, prio, sw_irq); target 211 arch/powerpc/sysdev/xive/spapr.c lisn, target, prio, rc); target 220 arch/powerpc/sysdev/xive/spapr.c unsigned long *target, target 231 arch/powerpc/sysdev/xive/spapr.c target, prio, sw_irq); target 240 arch/powerpc/sysdev/xive/spapr.c *target = retbuf[0]; target 251 arch/powerpc/sysdev/xive/spapr.c unsigned long target, target 260 arch/powerpc/sysdev/xive/spapr.c rc = plpar_hcall(H_INT_GET_QUEUE_INFO, retbuf, flags, target, target 266 arch/powerpc/sysdev/xive/spapr.c 
target, priority, rc); target 282 arch/powerpc/sysdev/xive/spapr.c unsigned long target, target 290 arch/powerpc/sysdev/xive/spapr.c flags, target, priority, qpage, qsize); target 293 arch/powerpc/sysdev/xive/spapr.c rc = plpar_hcall_norets(H_INT_SET_QUEUE_CONFIG, flags, target, target 299 arch/powerpc/sysdev/xive/spapr.c target, priority, qpage, rc); target 431 arch/powerpc/sysdev/xive/spapr.c static int xive_spapr_configure_irq(u32 hw_irq, u32 target, u8 prio, u32 sw_irq) target 435 arch/powerpc/sysdev/xive/spapr.c rc = plpar_int_set_source_config(XIVE_SRC_SET_EISN, hw_irq, target, target 441 arch/powerpc/sysdev/xive/spapr.c static int xive_spapr_get_irq_config(u32 hw_irq, u32 *target, u8 *prio, target 452 arch/powerpc/sysdev/xive/spapr.c *target = h_target; target 460 arch/powerpc/sysdev/xive/spapr.c static int xive_spapr_configure_queue(u32 target, struct xive_q *q, u8 prio, target 482 arch/powerpc/sysdev/xive/spapr.c rc = plpar_int_get_queue_info(0, target, prio, &esn_page, &esn_size); target 485 arch/powerpc/sysdev/xive/spapr.c target, prio); target 497 arch/powerpc/sysdev/xive/spapr.c rc = plpar_int_set_queue_config(flags, target, prio, qpage_phys, order); target 500 arch/powerpc/sysdev/xive/spapr.c target, prio); target 42 arch/powerpc/sysdev/xive/xive-internal.h int (*configure_irq)(u32 hw_irq, u32 target, u8 prio, u32 sw_irq); target 43 arch/powerpc/sysdev/xive/xive-internal.h int (*get_irq_config)(u32 hw_irq, u32 *target, u8 *prio, target 44 arch/riscv/kernel/ftrace.c static int __ftrace_modify_call(unsigned long hook_pos, unsigned long target, target 51 arch/riscv/kernel/ftrace.c make_call(hook_pos, target, call); target 31 arch/riscv/kernel/ptrace.c static int riscv_gpr_get(struct task_struct *target, target 38 arch/riscv/kernel/ptrace.c regs = task_pt_regs(target); target 42 arch/riscv/kernel/ptrace.c static int riscv_gpr_set(struct task_struct *target, target 50 arch/riscv/kernel/ptrace.c regs = task_pt_regs(target); target 56 arch/riscv/kernel/ptrace.c static int riscv_fpr_get(struct task_struct *target, target 62 arch/riscv/kernel/ptrace.c struct __riscv_d_ext_state *fstate = &target->thread.fstate; target 75 arch/riscv/kernel/ptrace.c static int riscv_fpr_set(struct task_struct *target, target 81 arch/riscv/kernel/ptrace.c struct __riscv_d_ext_state *fstate = &target->thread.fstate; target 82 arch/s390/include/asm/ftrace.h unsigned long target; target 85 arch/s390/include/asm/ftrace.h target = is_module_addr((void *) ip) ? 
ftrace_plt : FTRACE_ADDR; target 87 arch/s390/include/asm/ftrace.h insn->disp = (target - ip) / 2; target 76 arch/s390/include/asm/nospec-insn.h .ifc \rtarget,%r\r2 target 168 arch/s390/include/asm/nospec-insn.h 559: __DECODE_RRR __THUNK_BRASL,\rsave,\rtarget,\ruse target 190 arch/s390/include/asm/nospec-insn.h basr \rsave,\rtarget target 892 arch/s390/kernel/ptrace.c static int s390_regs_get(struct task_struct *target, target 897 arch/s390/kernel/ptrace.c if (target == current) target 898 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); target 903 arch/s390/kernel/ptrace.c *k++ = __peek_user(target, pos); target 910 arch/s390/kernel/ptrace.c if (__put_user(__peek_user(target, pos), u++)) target 919 arch/s390/kernel/ptrace.c static int s390_regs_set(struct task_struct *target, target 926 arch/s390/kernel/ptrace.c if (target == current) target 927 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); target 932 arch/s390/kernel/ptrace.c rc = __poke_user(target, pos, *k++); target 943 arch/s390/kernel/ptrace.c rc = __poke_user(target, pos, word); target 949 arch/s390/kernel/ptrace.c if (rc == 0 && target == current) target 950 arch/s390/kernel/ptrace.c restore_access_regs(target->thread.acrs); target 955 arch/s390/kernel/ptrace.c static int s390_fpregs_get(struct task_struct *target, target 961 arch/s390/kernel/ptrace.c if (target == current) target 964 arch/s390/kernel/ptrace.c fp_regs.fpc = target->thread.fpu.fpc; target 965 arch/s390/kernel/ptrace.c fpregs_store(&fp_regs, &target->thread.fpu); target 971 arch/s390/kernel/ptrace.c static int s390_fpregs_set(struct task_struct *target, target 979 arch/s390/kernel/ptrace.c if (target == current) target 983 arch/s390/kernel/ptrace.c convert_vx_to_fp(fprs, target->thread.fpu.vxrs); target 985 arch/s390/kernel/ptrace.c memcpy(&fprs, target->thread.fpu.fprs, sizeof(fprs)); target 989 arch/s390/kernel/ptrace.c u32 ufpc[2] = { target->thread.fpu.fpc, 0 }; target 996 arch/s390/kernel/ptrace.c target->thread.fpu.fpc = ufpc[0]; target 1006 arch/s390/kernel/ptrace.c convert_fp_to_vx(target->thread.fpu.vxrs, fprs); target 1008 arch/s390/kernel/ptrace.c memcpy(target->thread.fpu.fprs, &fprs, sizeof(fprs)); target 1013 arch/s390/kernel/ptrace.c static int s390_last_break_get(struct task_struct *target, target 1021 arch/s390/kernel/ptrace.c *k = target->thread.last_break; target 1024 arch/s390/kernel/ptrace.c if (__put_user(target->thread.last_break, u)) target 1031 arch/s390/kernel/ptrace.c static int s390_last_break_set(struct task_struct *target, target 1039 arch/s390/kernel/ptrace.c static int s390_tdb_get(struct task_struct *target, target 1044 arch/s390/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 1049 arch/s390/kernel/ptrace.c data = target->thread.trap_tdb; target 1053 arch/s390/kernel/ptrace.c static int s390_tdb_set(struct task_struct *target, target 1061 arch/s390/kernel/ptrace.c static int s390_vxrs_low_get(struct task_struct *target, target 1071 arch/s390/kernel/ptrace.c if (target == current) target 1074 arch/s390/kernel/ptrace.c vxrs[i] = *((__u64 *)(target->thread.fpu.vxrs + i) + 1); target 1078 arch/s390/kernel/ptrace.c static int s390_vxrs_low_set(struct task_struct *target, target 1088 arch/s390/kernel/ptrace.c if (target == current) target 1092 arch/s390/kernel/ptrace.c vxrs[i] = *((__u64 *)(target->thread.fpu.vxrs + i) + 1); target 1097 arch/s390/kernel/ptrace.c *((__u64 *)(target->thread.fpu.vxrs + i) + 1) = vxrs[i]; target 1102 arch/s390/kernel/ptrace.c static int 
s390_vxrs_high_get(struct task_struct *target, target 1111 arch/s390/kernel/ptrace.c if (target == current) target 1113 arch/s390/kernel/ptrace.c memcpy(vxrs, target->thread.fpu.vxrs + __NUM_VXRS_LOW, sizeof(vxrs)); target 1118 arch/s390/kernel/ptrace.c static int s390_vxrs_high_set(struct task_struct *target, target 1127 arch/s390/kernel/ptrace.c if (target == current) target 1131 arch/s390/kernel/ptrace.c target->thread.fpu.vxrs + __NUM_VXRS_LOW, 0, -1); target 1135 arch/s390/kernel/ptrace.c static int s390_system_call_get(struct task_struct *target, target 1140 arch/s390/kernel/ptrace.c unsigned int *data = &target->thread.system_call; target 1145 arch/s390/kernel/ptrace.c static int s390_system_call_set(struct task_struct *target, target 1150 arch/s390/kernel/ptrace.c unsigned int *data = &target->thread.system_call; target 1155 arch/s390/kernel/ptrace.c static int s390_gs_cb_get(struct task_struct *target, target 1160 arch/s390/kernel/ptrace.c struct gs_cb *data = target->thread.gs_cb; target 1166 arch/s390/kernel/ptrace.c if (target == current) target 1172 arch/s390/kernel/ptrace.c static int s390_gs_cb_set(struct task_struct *target, target 1182 arch/s390/kernel/ptrace.c if (!target->thread.gs_cb) { target 1187 arch/s390/kernel/ptrace.c if (!target->thread.gs_cb) target 1189 arch/s390/kernel/ptrace.c else if (target == current) target 1192 arch/s390/kernel/ptrace.c gs_cb = *target->thread.gs_cb; target 1200 arch/s390/kernel/ptrace.c if (!target->thread.gs_cb) target 1201 arch/s390/kernel/ptrace.c target->thread.gs_cb = data; target 1202 arch/s390/kernel/ptrace.c *target->thread.gs_cb = gs_cb; target 1203 arch/s390/kernel/ptrace.c if (target == current) { target 1205 arch/s390/kernel/ptrace.c restore_gs_cb(target->thread.gs_cb); target 1211 arch/s390/kernel/ptrace.c static int s390_gs_bc_get(struct task_struct *target, target 1216 arch/s390/kernel/ptrace.c struct gs_cb *data = target->thread.gs_bc_cb; target 1226 arch/s390/kernel/ptrace.c static int s390_gs_bc_set(struct task_struct *target, target 1231 arch/s390/kernel/ptrace.c struct gs_cb *data = target->thread.gs_bc_cb; target 1239 arch/s390/kernel/ptrace.c target->thread.gs_bc_cb = data; target 1272 arch/s390/kernel/ptrace.c static int s390_runtime_instr_get(struct task_struct *target, target 1277 arch/s390/kernel/ptrace.c struct runtime_instr_cb *data = target->thread.ri_cb; target 1288 arch/s390/kernel/ptrace.c static int s390_runtime_instr_set(struct task_struct *target, target 1299 arch/s390/kernel/ptrace.c if (!target->thread.ri_cb) { target 1305 arch/s390/kernel/ptrace.c if (target->thread.ri_cb) { target 1306 arch/s390/kernel/ptrace.c if (target == current) target 1309 arch/s390/kernel/ptrace.c ri_cb = *target->thread.ri_cb; target 1325 arch/s390/kernel/ptrace.c if (!target->thread.ri_cb) target 1326 arch/s390/kernel/ptrace.c target->thread.ri_cb = data; target 1327 arch/s390/kernel/ptrace.c *target->thread.ri_cb = ri_cb; target 1328 arch/s390/kernel/ptrace.c if (target == current) target 1329 arch/s390/kernel/ptrace.c load_runtime_instr_cb(target->thread.ri_cb); target 1426 arch/s390/kernel/ptrace.c static int s390_compat_regs_get(struct task_struct *target, target 1431 arch/s390/kernel/ptrace.c if (target == current) target 1432 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); target 1437 arch/s390/kernel/ptrace.c *k++ = __peek_user_compat(target, pos); target 1444 arch/s390/kernel/ptrace.c if (__put_user(__peek_user_compat(target, pos), u++)) target 1453 arch/s390/kernel/ptrace.c static int 
s390_compat_regs_set(struct task_struct *target, target 1460 arch/s390/kernel/ptrace.c if (target == current) target 1461 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); target 1466 arch/s390/kernel/ptrace.c rc = __poke_user_compat(target, pos, *k++); target 1477 arch/s390/kernel/ptrace.c rc = __poke_user_compat(target, pos, word); target 1483 arch/s390/kernel/ptrace.c if (rc == 0 && target == current) target 1484 arch/s390/kernel/ptrace.c restore_access_regs(target->thread.acrs); target 1489 arch/s390/kernel/ptrace.c static int s390_compat_regs_high_get(struct task_struct *target, target 1497 arch/s390/kernel/ptrace.c &task_pt_regs(target)->gprs[pos / sizeof(compat_ulong_t)]; target 1517 arch/s390/kernel/ptrace.c static int s390_compat_regs_high_set(struct task_struct *target, target 1526 arch/s390/kernel/ptrace.c &task_pt_regs(target)->gprs[pos / sizeof(compat_ulong_t)]; target 1550 arch/s390/kernel/ptrace.c static int s390_compat_last_break_get(struct task_struct *target, target 1558 arch/s390/kernel/ptrace.c last_break = target->thread.last_break; target 1571 arch/s390/kernel/ptrace.c static int s390_compat_last_break_set(struct task_struct *target, target 2230 arch/s390/kvm/interrupt.c void *target = NULL; target 2242 arch/s390/kvm/interrupt.c target = (void *) &inti->ext; target 2247 arch/s390/kvm/interrupt.c target = (void *) &inti->io; target 2252 arch/s390/kvm/interrupt.c target = (void *) &inti->mchk; target 2260 arch/s390/kvm/interrupt.c if (copy_from_user(target, source, size)) target 254 arch/s390/net/bpf_jit_comp.c #define EMIT6_PCREL_RILB(op, b, target) \ target 256 arch/s390/net/bpf_jit_comp.c int rel = (target - jit->prg) / 2; \ target 261 arch/s390/net/bpf_jit_comp.c #define EMIT6_PCREL_RIL(op, target) \ target 263 arch/s390/net/bpf_jit_comp.c int rel = (target - jit->prg) / 2; \ target 122 arch/s390/numa/toptree.c static int toptree_insert(struct toptree *cand, struct toptree *target) target 124 arch/s390/numa/toptree.c if (!cand || !target) target 126 arch/s390/numa/toptree.c if (target->level != (cand->level + 1)) target 128 arch/s390/numa/toptree.c list_add_tail(&cand->sibling, &target->children); target 129 arch/s390/numa/toptree.c cand->parent = target; target 130 arch/s390/numa/toptree.c toptree_update_mask(target); target 141 arch/s390/numa/toptree.c static void toptree_move_children(struct toptree *cand, struct toptree *target) target 146 arch/s390/numa/toptree.c toptree_move(child, target); target 198 arch/s390/numa/toptree.c void toptree_move(struct toptree *cand, struct toptree *target) target 202 arch/s390/numa/toptree.c if (cand->level + 1 == target->level) { target 204 arch/s390/numa/toptree.c toptree_insert(cand, target); target 220 arch/s390/numa/toptree.c } while (stack_target->level < (target->level - 1)); target 224 arch/s390/numa/toptree.c toptree_insert(stack_target, target); target 29 arch/s390/numa/toptree.h void toptree_move(struct toptree *cand, struct toptree *target); target 40 arch/sh/include/asm/fpu.h extern int fpregs_get(struct task_struct *target, target 136 arch/sh/kernel/ptrace_32.c static int genregs_get(struct task_struct *target, target 141 arch/sh/kernel/ptrace_32.c const struct pt_regs *regs = task_pt_regs(target); target 160 arch/sh/kernel/ptrace_32.c static int genregs_set(struct task_struct *target, target 165 arch/sh/kernel/ptrace_32.c struct pt_regs *regs = task_pt_regs(target); target 184 arch/sh/kernel/ptrace_32.c int fpregs_get(struct task_struct *target, target 191 arch/sh/kernel/ptrace_32.c ret = 
init_fpu(target); target 197 arch/sh/kernel/ptrace_32.c &target->thread.xstate->hardfpu, 0, -1); target 200 arch/sh/kernel/ptrace_32.c &target->thread.xstate->softfpu, 0, -1); target 203 arch/sh/kernel/ptrace_32.c static int fpregs_set(struct task_struct *target, target 210 arch/sh/kernel/ptrace_32.c ret = init_fpu(target); target 214 arch/sh/kernel/ptrace_32.c set_stopped_child_used_math(target); target 218 arch/sh/kernel/ptrace_32.c &target->thread.xstate->hardfpu, 0, -1); target 221 arch/sh/kernel/ptrace_32.c &target->thread.xstate->softfpu, 0, -1); target 224 arch/sh/kernel/ptrace_32.c static int fpregs_active(struct task_struct *target, target 227 arch/sh/kernel/ptrace_32.c return tsk_used_math(target) ? regset->n : 0; target 232 arch/sh/kernel/ptrace_32.c static int dspregs_get(struct task_struct *target, target 238 arch/sh/kernel/ptrace_32.c (struct pt_dspregs *)&target->thread.dsp_status.dsp_regs; target 250 arch/sh/kernel/ptrace_32.c static int dspregs_set(struct task_struct *target, target 256 arch/sh/kernel/ptrace_32.c (struct pt_dspregs *)&target->thread.dsp_status.dsp_regs; target 268 arch/sh/kernel/ptrace_32.c static int dspregs_active(struct task_struct *target, target 271 arch/sh/kernel/ptrace_32.c struct pt_regs *regs = task_pt_regs(target); target 146 arch/sh/kernel/ptrace_64.c static int genregs_get(struct task_struct *target, target 151 arch/sh/kernel/ptrace_64.c const struct pt_regs *regs = task_pt_regs(target); target 179 arch/sh/kernel/ptrace_64.c static int genregs_set(struct task_struct *target, target 184 arch/sh/kernel/ptrace_64.c struct pt_regs *regs = task_pt_regs(target); target 214 arch/sh/kernel/ptrace_64.c int fpregs_get(struct task_struct *target, target 221 arch/sh/kernel/ptrace_64.c ret = init_fpu(target); target 226 arch/sh/kernel/ptrace_64.c &target->thread.xstate->hardfpu, 0, -1); target 229 arch/sh/kernel/ptrace_64.c static int fpregs_set(struct task_struct *target, target 236 arch/sh/kernel/ptrace_64.c ret = init_fpu(target); target 240 arch/sh/kernel/ptrace_64.c set_stopped_child_used_math(target); target 243 arch/sh/kernel/ptrace_64.c &target->thread.xstate->hardfpu, 0, -1); target 246 arch/sh/kernel/ptrace_64.c static int fpregs_active(struct task_struct *target, target 249 arch/sh/kernel/ptrace_64.c return tsk_used_math(target) ? 
regset->n : 0; target 47 arch/sparc/include/asm/jump_label.h jump_label_t target; target 19 arch/sparc/kernel/jump_label.c s32 off = (s32)entry->target - (s32)entry->code; target 325 arch/sparc/kernel/mdesc.c u64 target; target 327 arch/sparc/kernel/mdesc.c target = mdesc_arc_target(hp, a); target 328 arch/sparc/kernel/mdesc.c id = mdesc_get_property(hp, target, target 851 arch/sparc/kernel/mdesc.c u64 target = mdesc_arc_target(hp, a); target 852 arch/sparc/kernel/mdesc.c const char *name = mdesc_node_name(hp, target); target 855 arch/sparc/kernel/mdesc.c fill_in_one_cache(c, hp, target); target 49 arch/sparc/kernel/ptrace_32.c static int genregs32_get(struct task_struct *target, target 54 arch/sparc/kernel/ptrace_32.c const struct pt_regs *regs = target->thread.kregs; target 60 arch/sparc/kernel/ptrace_32.c if (target == current) target 127 arch/sparc/kernel/ptrace_32.c static int genregs32_set(struct task_struct *target, target 132 arch/sparc/kernel/ptrace_32.c struct pt_regs *regs = target->thread.kregs; target 138 arch/sparc/kernel/ptrace_32.c if (target == current) target 211 arch/sparc/kernel/ptrace_32.c static int fpregs32_get(struct task_struct *target, target 216 arch/sparc/kernel/ptrace_32.c const unsigned long *fpregs = target->thread.float_regs; target 220 arch/sparc/kernel/ptrace_32.c if (target == current) target 234 arch/sparc/kernel/ptrace_32.c &target->thread.fsr, target 255 arch/sparc/kernel/ptrace_32.c static int fpregs32_set(struct task_struct *target, target 260 arch/sparc/kernel/ptrace_32.c unsigned long *fpregs = target->thread.float_regs; target 264 arch/sparc/kernel/ptrace_32.c if (target == current) target 276 arch/sparc/kernel/ptrace_32.c &target->thread.fsr, target 162 arch/sparc/kernel/ptrace_64.c static int get_from_target(struct task_struct *target, unsigned long uaddr, target 165 arch/sparc/kernel/ptrace_64.c if (target == current) { target 169 arch/sparc/kernel/ptrace_64.c int len2 = access_process_vm(target, uaddr, kbuf, len, target 177 arch/sparc/kernel/ptrace_64.c static int set_to_target(struct task_struct *target, unsigned long uaddr, target 180 arch/sparc/kernel/ptrace_64.c if (target == current) { target 184 arch/sparc/kernel/ptrace_64.c int len2 = access_process_vm(target, uaddr, kbuf, len, target 192 arch/sparc/kernel/ptrace_64.c static int regwindow64_get(struct task_struct *target, target 202 arch/sparc/kernel/ptrace_64.c if (get_from_target(target, rw_addr, &win32, sizeof(win32))) target 210 arch/sparc/kernel/ptrace_64.c if (get_from_target(target, rw_addr, wbuf, sizeof(*wbuf))) target 217 arch/sparc/kernel/ptrace_64.c static int regwindow64_set(struct task_struct *target, target 232 arch/sparc/kernel/ptrace_64.c if (set_to_target(target, rw_addr, &win32, sizeof(win32))) target 236 arch/sparc/kernel/ptrace_64.c if (set_to_target(target, rw_addr, wbuf, sizeof(*wbuf))) target 248 arch/sparc/kernel/ptrace_64.c static int genregs64_get(struct task_struct *target, target 253 arch/sparc/kernel/ptrace_64.c const struct pt_regs *regs = task_pt_regs(target); target 256 arch/sparc/kernel/ptrace_64.c if (target == current) target 265 arch/sparc/kernel/ptrace_64.c if (regwindow64_get(target, regs, &window)) target 298 arch/sparc/kernel/ptrace_64.c static int genregs64_set(struct task_struct *target, target 303 arch/sparc/kernel/ptrace_64.c struct pt_regs *regs = task_pt_regs(target); target 306 arch/sparc/kernel/ptrace_64.c if (target == current) target 315 arch/sparc/kernel/ptrace_64.c if (regwindow64_get(target, regs, &window)) target 324 
arch/sparc/kernel/ptrace_64.c regwindow64_set(target, regs, &window)) target 372 arch/sparc/kernel/ptrace_64.c static int fpregs64_get(struct task_struct *target, target 377 arch/sparc/kernel/ptrace_64.c const unsigned long *fpregs = task_thread_info(target)->fpregs; target 381 arch/sparc/kernel/ptrace_64.c if (target == current) target 384 arch/sparc/kernel/ptrace_64.c fprs = task_thread_info(target)->fpsaved[0]; target 410 arch/sparc/kernel/ptrace_64.c fsr = task_thread_info(target)->xfsr[0]; target 411 arch/sparc/kernel/ptrace_64.c gsr = task_thread_info(target)->gsr[0]; target 439 arch/sparc/kernel/ptrace_64.c static int fpregs64_set(struct task_struct *target, target 444 arch/sparc/kernel/ptrace_64.c unsigned long *fpregs = task_thread_info(target)->fpregs; target 448 arch/sparc/kernel/ptrace_64.c if (target == current) target 456 arch/sparc/kernel/ptrace_64.c task_thread_info(target)->xfsr, target 461 arch/sparc/kernel/ptrace_64.c task_thread_info(target)->gsr, target 465 arch/sparc/kernel/ptrace_64.c fprs = task_thread_info(target)->fpsaved[0]; target 474 arch/sparc/kernel/ptrace_64.c task_thread_info(target)->fpsaved[0] = fprs; target 516 arch/sparc/kernel/ptrace_64.c static int genregs32_get(struct task_struct *target, target 521 arch/sparc/kernel/ptrace_64.c const struct pt_regs *regs = task_pt_regs(target); target 527 arch/sparc/kernel/ptrace_64.c if (target == current) target 539 arch/sparc/kernel/ptrace_64.c if (target == current) { target 546 arch/sparc/kernel/ptrace_64.c if (access_process_vm(target, target 565 arch/sparc/kernel/ptrace_64.c if (target == current) { target 573 arch/sparc/kernel/ptrace_64.c if (access_process_vm(target, target 580 arch/sparc/kernel/ptrace_64.c if (access_process_vm(target, target 628 arch/sparc/kernel/ptrace_64.c static int genregs32_set(struct task_struct *target, target 633 arch/sparc/kernel/ptrace_64.c struct pt_regs *regs = task_pt_regs(target); target 639 arch/sparc/kernel/ptrace_64.c if (target == current) target 651 arch/sparc/kernel/ptrace_64.c if (target == current) { target 658 arch/sparc/kernel/ptrace_64.c if (access_process_vm(target, target 679 arch/sparc/kernel/ptrace_64.c if (target == current) { target 687 arch/sparc/kernel/ptrace_64.c if (access_process_vm(target, target 694 arch/sparc/kernel/ptrace_64.c if (access_process_vm(target, target 750 arch/sparc/kernel/ptrace_64.c static int fpregs32_get(struct task_struct *target, target 755 arch/sparc/kernel/ptrace_64.c const unsigned long *fpregs = task_thread_info(target)->fpregs; target 761 arch/sparc/kernel/ptrace_64.c if (target == current) target 764 arch/sparc/kernel/ptrace_64.c fprs = task_thread_info(target)->fpsaved[0]; target 766 arch/sparc/kernel/ptrace_64.c fsr = task_thread_info(target)->xfsr[0]; target 804 arch/sparc/kernel/ptrace_64.c static int fpregs32_set(struct task_struct *target, target 809 arch/sparc/kernel/ptrace_64.c unsigned long *fpregs = task_thread_info(target)->fpregs; target 813 arch/sparc/kernel/ptrace_64.c if (target == current) target 816 arch/sparc/kernel/ptrace_64.c fprs = task_thread_info(target)->fpsaved[0]; target 834 arch/sparc/kernel/ptrace_64.c val = task_thread_info(target)->xfsr[0]; target 837 arch/sparc/kernel/ptrace_64.c task_thread_info(target)->xfsr[0] = val; target 842 arch/sparc/kernel/ptrace_64.c task_thread_info(target)->fpsaved[0] = fprs; target 401 arch/sparc/kernel/smp_64.c u64 result, target; target 411 arch/sparc/kernel/smp_64.c target = (cpu << 14) | 0x70; target 435 arch/sparc/kernel/smp_64.c "r" (data0), "r" (data1), "r" 
(data2), "r" (target), target 526 arch/sparc/kernel/smp_64.c u64 target, nr; target 532 arch/sparc/kernel/smp_64.c target = (nr << 14) | 0x70; target 536 arch/sparc/kernel/smp_64.c target |= (nack_busy_id << 24); target 544 arch/sparc/kernel/smp_64.c : "r" (target), "i" (ASI_INTR_W)); target 213 arch/sparc/kernel/vio.c u64 target; target 215 arch/sparc/kernel/vio.c target = mdesc_arc_target(hp, a); target 216 arch/sparc/kernel/vio.c cfg_handle = mdesc_get_property(hp, target, target 263 arch/sparc/kernel/vio.c u64 target; target 265 arch/sparc/kernel/vio.c target = mdesc_arc_target(hp, a); target 267 arch/sparc/kernel/vio.c irq = mdesc_get_property(hp, target, "tx-ino", NULL); target 271 arch/sparc/kernel/vio.c irq = mdesc_get_property(hp, target, "rx-ino", NULL); target 275 arch/sparc/kernel/vio.c chan_id = mdesc_get_property(hp, target, "id", NULL); target 479 arch/sparc/kernel/vio.c u64 target = mdesc_arc_target(hp, a); target 480 arch/sparc/kernel/vio.c const char *name = mdesc_node_name(hp, target); target 1143 arch/sparc/mm/init_64.c u64 target = mdesc_arc_target(md, arc); target 1146 arch/sparc/mm/init_64.c val = mdesc_get_property(md, target, target 1161 arch/sparc/mm/init_64.c u64 target = mdesc_arc_target(md, arc); target 1162 arch/sparc/mm/init_64.c const char *name = mdesc_node_name(md, target); target 1168 arch/sparc/mm/init_64.c val = mdesc_get_property(md, target, "latency", NULL); target 1173 arch/sparc/mm/init_64.c candidate = target; target 1350 arch/sparc/mm/init_64.c u64 target = mdesc_arc_target(md, arc); target 1351 arch/sparc/mm/init_64.c const char *name = mdesc_node_name(md, target); target 1356 arch/sparc/mm/init_64.c id = mdesc_get_property(md, target, "id", NULL); target 1405 arch/sparc/mm/init_64.c u64 target = mdesc_arc_target(md, arc); target 1406 arch/sparc/mm/init_64.c struct mdesc_mlgroup *m = find_mlgroup(target); target 1425 arch/sparc/mm/init_64.c u64 target = mdesc_arc_target(md, arc); target 1426 arch/sparc/mm/init_64.c struct mdesc_mlgroup *m = find_mlgroup(target); target 181 arch/um/drivers/mconsole_user.c struct sockaddr_un target; target 199 arch/um/drivers/mconsole_user.c target.sun_family = AF_UNIX; target 200 arch/um/drivers/mconsole_user.c strcpy(target.sun_path, sock_name); target 211 arch/um/drivers/mconsole_user.c n = sendto(notify_sock, &packet, len, 0, (struct sockaddr *) &target, target 212 arch/um/drivers/mconsole_user.c sizeof(target)); target 224 arch/x86/events/amd/power.c int target; target 234 arch/x86/events/amd/power.c target = cpumask_any_but(topology_sibling_cpumask(cpu), cpu); target 235 arch/x86/events/amd/power.c if (target < nr_cpumask_bits) { target 236 arch/x86/events/amd/power.c cpumask_set_cpu(target, &cpu_mask); target 237 arch/x86/events/amd/power.c perf_pmu_migrate_context(&pmu_class, cpu, target); target 244 arch/x86/events/amd/power.c int target; target 255 arch/x86/events/amd/power.c target = cpumask_any_but(topology_sibling_cpumask(cpu), cpu); target 256 arch/x86/events/amd/power.c if (target >= nr_cpumask_bits) target 403 arch/x86/events/intel/cstate.c unsigned int target; target 408 arch/x86/events/intel/cstate.c target = cpumask_any_but(topology_sibling_cpumask(cpu), cpu); target 410 arch/x86/events/intel/cstate.c if (target < nr_cpu_ids) { target 411 arch/x86/events/intel/cstate.c cpumask_set_cpu(target, &cstate_core_cpu_mask); target 412 arch/x86/events/intel/cstate.c perf_pmu_migrate_context(&cstate_core_pmu, cpu, target); target 419 arch/x86/events/intel/cstate.c target = 
cpumask_any_but(topology_die_cpumask(cpu), cpu); target 421 arch/x86/events/intel/cstate.c if (target < nr_cpu_ids) { target 422 arch/x86/events/intel/cstate.c cpumask_set_cpu(target, &cstate_pkg_cpu_mask); target 423 arch/x86/events/intel/cstate.c perf_pmu_migrate_context(&cstate_pkg_pmu, cpu, target); target 431 arch/x86/events/intel/cstate.c unsigned int target; target 437 arch/x86/events/intel/cstate.c target = cpumask_any_and(&cstate_core_cpu_mask, target 440 arch/x86/events/intel/cstate.c if (has_cstate_core && target >= nr_cpu_ids) target 447 arch/x86/events/intel/cstate.c target = cpumask_any_and(&cstate_pkg_cpu_mask, target 449 arch/x86/events/intel/cstate.c if (has_cstate_pkg && target >= nr_cpu_ids) target 527 arch/x86/events/intel/rapl.c int target; target 535 arch/x86/events/intel/rapl.c target = cpumask_any_but(topology_die_cpumask(cpu), cpu); target 538 arch/x86/events/intel/rapl.c if (target < nr_cpu_ids) { target 539 arch/x86/events/intel/rapl.c cpumask_set_cpu(target, &rapl_cpu_mask); target 540 arch/x86/events/intel/rapl.c pmu->cpu = target; target 541 arch/x86/events/intel/rapl.c perf_pmu_migrate_context(pmu->pmu, cpu, target); target 549 arch/x86/events/intel/rapl.c int target; target 569 arch/x86/events/intel/rapl.c target = cpumask_any_and(&rapl_cpu_mask, topology_die_cpumask(cpu)); target 570 arch/x86/events/intel/rapl.c if (target < nr_cpu_ids) target 1214 arch/x86/events/intel/uncore.c int die, target; target 1220 arch/x86/events/intel/uncore.c target = cpumask_any_but(topology_die_cpumask(cpu), cpu); target 1223 arch/x86/events/intel/uncore.c if (target < nr_cpu_ids) target 1224 arch/x86/events/intel/uncore.c cpumask_set_cpu(target, &uncore_cpu_mask); target 1226 arch/x86/events/intel/uncore.c target = -1; target 1228 arch/x86/events/intel/uncore.c uncore_change_context(uncore_msr_uncores, cpu, target); target 1229 arch/x86/events/intel/uncore.c uncore_change_context(uncore_mmio_uncores, cpu, target); target 1230 arch/x86/events/intel/uncore.c uncore_change_context(uncore_pci_uncores, cpu, target); target 1305 arch/x86/events/intel/uncore.c int die, target, msr_ret, mmio_ret; target 1317 arch/x86/events/intel/uncore.c target = cpumask_any_and(&uncore_cpu_mask, topology_die_cpumask(cpu)); target 1318 arch/x86/events/intel/uncore.c if (target < nr_cpu_ids) target 13 arch/x86/include/asm/intel_pconfig.h int pconfig_target_supported(enum pconfig_target target); target 58 arch/x86/include/asm/jump_label.h .macro STATIC_JUMP_IF_TRUE target, key, def target 75 arch/x86/include/asm/jump_label.h .macro STATIC_JUMP_IF_FALSE target, key, def target 53 arch/x86/include/asm/mcsafe_test.h .macro MCSAFE_TEST_SRC reg count target target 59 arch/x86/include/asm/mcsafe_test.h .macro MCSAFE_TEST_DST reg count target target 68 arch/x86/include/asm/mcsafe_test.h .macro MCSAFE_TEST_SRC reg count target target 71 arch/x86/include/asm/mcsafe_test.h .macro MCSAFE_TEST_DST reg count target target 84 arch/x86/kernel/cpu/hypervisor.c static void __init copy_array(const void *src, void *target, unsigned int size) target 88 arch/x86/kernel/cpu/hypervisor.c const void **to = (const void **)target; target 27 arch/x86/kernel/cpu/intel_pconfig.c int pconfig_target_supported(enum pconfig_target target) target 35 arch/x86/kernel/cpu/intel_pconfig.c if (WARN_ON_ONCE(target >= 64)) target 37 arch/x86/kernel/cpu/intel_pconfig.c return targets_supported & (1ULL << target); target 16 arch/x86/kernel/fpu/regset.c int regset_fpregs_active(struct task_struct *target, const struct user_regset *regset) 
target 21 arch/x86/kernel/fpu/regset.c int regset_xregset_fpregs_active(struct task_struct *target, const struct user_regset *regset) target 29 arch/x86/kernel/fpu/regset.c int xfpregs_get(struct task_struct *target, const struct user_regset *regset, target 33 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; target 45 arch/x86/kernel/fpu/regset.c int xfpregs_set(struct task_struct *target, const struct user_regset *regset, target 49 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; target 76 arch/x86/kernel/fpu/regset.c int xstateregs_get(struct task_struct *target, const struct user_regset *regset, target 80 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; target 113 arch/x86/kernel/fpu/regset.c int xstateregs_set(struct task_struct *target, const struct user_regset *regset, target 117 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; target 295 arch/x86/kernel/fpu/regset.c int fpregs_get(struct task_struct *target, const struct user_regset *regset, target 299 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; target 305 arch/x86/kernel/fpu/regset.c return fpregs_soft_get(target, regset, pos, count, kbuf, ubuf); target 315 arch/x86/kernel/fpu/regset.c convert_from_fxsr(kbuf, target); target 319 arch/x86/kernel/fpu/regset.c convert_from_fxsr(&env, target); target 324 arch/x86/kernel/fpu/regset.c int fpregs_set(struct task_struct *target, const struct user_regset *regset, target 328 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; target 336 arch/x86/kernel/fpu/regset.c return fpregs_soft_set(target, regset, pos, count, kbuf, ubuf); target 344 arch/x86/kernel/fpu/regset.c convert_from_fxsr(&env, target); target 348 arch/x86/kernel/fpu/regset.c convert_to_fxsr(&target->thread.fpu.state.fxsave, &env); target 208 arch/x86/kernel/kprobes/opt.c unsigned long target = 0; target 227 arch/x86/kernel/kprobes/opt.c target = (unsigned long)insn->next_byte + insn->immediate.value; target 229 arch/x86/kernel/kprobes/opt.c return (start <= target && target <= start + len); target 61 arch/x86/kernel/paravirt.c static unsigned paravirt_patch_call(void *insn_buff, const void *target, target 66 arch/x86/kernel/paravirt.c unsigned long delta = (unsigned long)target - (addr+call_len); target 88 arch/x86/kernel/paravirt.c static unsigned paravirt_patch_jmp(void *insn_buff, const void *target, target 92 arch/x86/kernel/paravirt.c unsigned long delta = (unsigned long)target - (addr+5); target 508 arch/x86/kernel/pci-calgary_64.c unsigned long target = ((unsigned long)bar) | offset; target 509 arch/x86/kernel/pci-calgary_64.c return (void __iomem*)target; target 533 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 536 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | PHB_AER_OFFSET); target 537 arch/x86/kernel/pci-calgary_64.c aer = readl(target); target 538 arch/x86/kernel/pci-calgary_64.c writel(0, target); target 541 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | PHB_PLSSR_OFFSET); target 542 arch/x86/kernel/pci-calgary_64.c val = readl(target); target 545 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, split_queue_offset(tbl->it_busno)); target 547 arch/x86/kernel/pci-calgary_64.c val = readq(target); target 554 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, tar_offset(tbl->it_busno)); target 555 arch/x86/kernel/pci-calgary_64.c writeq(tbl->tar_val, target); target 558 
arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | PHB_AER_OFFSET); target 559 arch/x86/kernel/pci-calgary_64.c writel(aer, target); target 560 arch/x86/kernel/pci-calgary_64.c (void)readl(target); /* flush */ target 566 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 578 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(bus) | PHB_PAGE_MIG_CTRL); target 579 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 580 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "1a. read 0x%x [LE] from %p\n", val, target); target 582 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "1b. writing 0x%x [LE] to %p\n", val, target); target 583 arch/x86/kernel/pci-calgary_64.c writel(cpu_to_be32(val), target); target 587 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, split_queue_offset(bus)); target 589 arch/x86/kernel/pci-calgary_64.c val64 = readq(target); target 596 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(bus) | PHB_PAGE_MIG_DEBUG); target 597 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 598 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "3. read 0x%x [LE] from %p\n", val, target); target 611 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(bus) | PHB_PAGE_MIG_CTRL); target 612 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "5a. slamming into HardStop by reading %p\n", target); target 613 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 614 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "5b. read 0x%x [LE] from %p\n", val, target); target 615 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(bus) | PHB_PAGE_MIG_DEBUG); target 616 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 617 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "5c. read 0x%x [LE] from %p (debug)\n", val, target); target 621 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, tar_offset(bus)); target 622 arch/x86/kernel/pci-calgary_64.c writeq(tbl->tar_val, target); target 626 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(bus) | PHB_PAGE_MIG_CTRL); target 627 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 628 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "7b. read 0x%x [LE] from %p\n", val, target); target 632 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(bus) | PHB_PAGE_MIG_CTRL); target 634 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "8b. writing 0x%x [LE] to %p\n", val, target); target 635 arch/x86/kernel/pci-calgary_64.c writel(cpu_to_be32(val), target); target 636 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 637 arch/x86/kernel/pci-calgary_64.c printk(KERN_DEBUG "8c. 
read 0x%x [LE] from %p\n", val, target); target 654 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 662 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_1_LOW); target 663 arch/x86/kernel/pci-calgary_64.c low = be32_to_cpu(readl(target)); target 664 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_1_HIGH); target 665 arch/x86/kernel/pci-calgary_64.c high = be32_to_cpu(readl(target)); target 666 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_1_SIZE); target 667 arch/x86/kernel/pci-calgary_64.c sizelow = be32_to_cpu(readl(target)); target 677 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 686 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_CONFIG_RW_OFFSET); target 687 arch/x86/kernel/pci-calgary_64.c val32 = be32_to_cpu(readl(target)); target 691 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_2_LOW); target 692 arch/x86/kernel/pci-calgary_64.c low = be32_to_cpu(readl(target)); target 693 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_2_HIGH); target 694 arch/x86/kernel/pci-calgary_64.c high = be32_to_cpu(readl(target)); target 695 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_2_SIZE_LOW); target 696 arch/x86/kernel/pci-calgary_64.c sizelow = be32_to_cpu(readl(target)); target 697 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_MEM_2_SIZE_HIGH); target 698 arch/x86/kernel/pci-calgary_64.c sizehigh = be32_to_cpu(readl(target)); target 739 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 766 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, tar_offset(dev->bus->number)); target 767 arch/x86/kernel/pci-calgary_64.c val64 = be64_to_cpu(readq(target)); target 780 arch/x86/kernel/pci-calgary_64.c writeq(tbl->tar_val, target); target 781 arch/x86/kernel/pci-calgary_64.c readq(target); /* flush */ target 790 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 793 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(tbl->bbar, tar_offset(dev->bus->number)); target 794 arch/x86/kernel/pci-calgary_64.c val64 = be64_to_cpu(readq(target)); target 796 arch/x86/kernel/pci-calgary_64.c writeq(cpu_to_be64(val64), target); target 797 arch/x86/kernel/pci-calgary_64.c readq(target); /* flush */ target 814 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 817 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | PHB_CSR_OFFSET); target 818 arch/x86/kernel/pci-calgary_64.c csr = be32_to_cpu(readl(target)); target 820 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | PHB_PLSSR_OFFSET); target 821 arch/x86/kernel/pci-calgary_64.c plssr = be32_to_cpu(readl(target)); target 832 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 839 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phboff | PHB_CSR_OFFSET); target 840 arch/x86/kernel/pci-calgary_64.c csr = be32_to_cpu(readl(target)); target 842 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phboff | PHB_PLSSR_OFFSET); target 843 arch/x86/kernel/pci-calgary_64.c plssr = be32_to_cpu(readl(target)); target 845 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phboff | 0x290); target 846 arch/x86/kernel/pci-calgary_64.c csmr = be32_to_cpu(readl(target)); target 848 
arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phboff | 0x800); target 849 arch/x86/kernel/pci-calgary_64.c mck = be32_to_cpu(readl(target)); target 861 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phboff | erroff); target 862 arch/x86/kernel/pci-calgary_64.c errregs[i] = be32_to_cpu(readl(target)); target 868 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phboff | PHB_ROOT_COMPLEX_STATUS); target 869 arch/x86/kernel/pci-calgary_64.c rcstat = be32_to_cpu(readl(target)); target 879 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 881 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | PHB_CSR_OFFSET); target 882 arch/x86/kernel/pci-calgary_64.c val32 = be32_to_cpu(readl(target)); target 889 arch/x86/kernel/pci-calgary_64.c writel(0, target); target 892 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(tbl->it_busno) | target 894 arch/x86/kernel/pci-calgary_64.c val32 = be32_to_cpu(readl(target)); target 896 arch/x86/kernel/pci-calgary_64.c writel(cpu_to_be32(val32), target); target 897 arch/x86/kernel/pci-calgary_64.c readl(target); /* flush */ target 908 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 925 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, CALGARY_CONFIG_REG); target 926 arch/x86/kernel/pci-calgary_64.c val64 = be64_to_cpu(readq(target)); target 932 arch/x86/kernel/pci-calgary_64.c writeq(cpu_to_be64(val64), target); target 933 arch/x86/kernel/pci-calgary_64.c readq(target); /* flush */ target 940 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 946 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_SAVIOR_L2); target 947 arch/x86/kernel/pci-calgary_64.c val = cpu_to_be32(readl(target)); target 949 arch/x86/kernel/pci-calgary_64.c writel(cpu_to_be32(val), target); target 969 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 978 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_CONFIG_RW_OFFSET); target 979 arch/x86/kernel/pci-calgary_64.c val32 = be32_to_cpu(readl(target)); target 988 arch/x86/kernel/pci-calgary_64.c writel(cpu_to_be32(val32), target); target 989 arch/x86/kernel/pci-calgary_64.c readl(target); /* flush */ target 999 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 1008 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, phb_offset(busnum) | PHB_CONFIG_RW_OFFSET); target 1009 arch/x86/kernel/pci-calgary_64.c val32 = be32_to_cpu(readl(target)); target 1013 arch/x86/kernel/pci-calgary_64.c writel(cpu_to_be32(val32), target); target 1014 arch/x86/kernel/pci-calgary_64.c readl(target); /* flush */ target 1068 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 1087 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bbar, offset); target 1089 arch/x86/kernel/pci-calgary_64.c val = be32_to_cpu(readl(target)); target 1306 arch/x86/kernel/pci-calgary_64.c void __iomem *target; target 1324 arch/x86/kernel/pci-calgary_64.c target = calgary_reg(bus_info[bus].bbar, target 1326 arch/x86/kernel/pci-calgary_64.c tce_space = be64_to_cpu(readq(target)); target 422 arch/x86/kernel/ptrace.c static int genregs_get(struct task_struct *target, target 430 arch/x86/kernel/ptrace.c *k++ = getreg(target, pos); target 437 arch/x86/kernel/ptrace.c if (__put_user(getreg(target, pos), u++)) target 447 arch/x86/kernel/ptrace.c static int genregs_set(struct task_struct *target, target 456 arch/x86/kernel/ptrace.c ret = 
putreg(target, pos, *k++); target 467 arch/x86/kernel/ptrace.c ret = putreg(target, pos, word); target 697 arch/x86/kernel/ptrace.c static int ioperm_active(struct task_struct *target, target 700 arch/x86/kernel/ptrace.c return target->thread.io_bitmap_max / regset->size; target 703 arch/x86/kernel/ptrace.c static int ioperm_get(struct task_struct *target, target 708 arch/x86/kernel/ptrace.c if (!target->thread.io_bitmap_ptr) target 712 arch/x86/kernel/ptrace.c target->thread.io_bitmap_ptr, target 989 arch/x86/kernel/ptrace.c static int genregs32_get(struct task_struct *target, target 997 arch/x86/kernel/ptrace.c getreg32(target, pos, k++); target 1005 arch/x86/kernel/ptrace.c getreg32(target, pos, &word); target 1016 arch/x86/kernel/ptrace.c static int genregs32_set(struct task_struct *target, target 1025 arch/x86/kernel/ptrace.c ret = putreg32(target, pos, *k++); target 1036 arch/x86/kernel/ptrace.c ret = putreg32(target, pos, word); target 248 arch/x86/kernel/tls.c int regset_tls_active(struct task_struct *target, target 251 arch/x86/kernel/tls.c struct thread_struct *t = &target->thread; target 258 arch/x86/kernel/tls.c int regset_tls_get(struct task_struct *target, const struct user_regset *regset, target 272 arch/x86/kernel/tls.c tls = &target->thread.tls_array[pos]; target 292 arch/x86/kernel/tls.c int regset_tls_set(struct task_struct *target, const struct user_regset *regset, target 316 arch/x86/kernel/tls.c set_tls_desc(target, target 792 arch/x86/kvm/lapic.c struct kvm_lapic *source, struct kvm_lapic *target) target 797 arch/x86/kvm/lapic.c !ipi && dest_id == APIC_BROADCAST && apic_x2apic_mode(target)) target 806 arch/x86/kvm/lapic.c struct kvm_lapic *target = vcpu->arch.apic; target 807 arch/x86/kvm/lapic.c u32 mda = kvm_apic_mda(vcpu, dest, source, target); target 809 arch/x86/kvm/lapic.c ASSERT(target); target 813 arch/x86/kvm/lapic.c return kvm_apic_match_physical_addr(target, mda); target 815 arch/x86/kvm/lapic.c return kvm_apic_match_logical_addr(target, mda); target 817 arch/x86/kvm/lapic.c return target == source; target 821 arch/x86/kvm/lapic.c return target != source; target 7435 arch/x86/kvm/x86.c struct kvm_vcpu *target = NULL; target 7442 arch/x86/kvm/x86.c target = map->phys_map[dest_id]->vcpu; target 7446 arch/x86/kvm/x86.c if (target && READ_ONCE(target->ready)) target 7447 arch/x86/kvm/x86.c kvm_vcpu_yield_to(target); target 638 arch/x86/math-emu/fpu_entry.c int fpregs_soft_set(struct task_struct *target, target 643 arch/x86/math-emu/fpu_entry.c struct swregs_state *s387 = &target->thread.fpu.state.soft; target 690 arch/x86/math-emu/fpu_entry.c int fpregs_soft_get(struct task_struct *target, target 695 arch/x86/math-emu/fpu_entry.c struct swregs_state *s387 = &target->thread.fpu.state.soft; target 60 arch/xtensa/include/asm/jump_label.h jump_label_t target; target 39 arch/xtensa/kernel/ptrace.c static int gpr_get(struct task_struct *target, target 44 arch/xtensa/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 68 arch/xtensa/kernel/ptrace.c static int gpr_set(struct task_struct *target, target 85 arch/xtensa/kernel/ptrace.c regs = task_pt_regs(target); target 117 arch/xtensa/kernel/ptrace.c static int tie_get(struct task_struct *target, target 123 arch/xtensa/kernel/ptrace.c struct pt_regs *regs = task_pt_regs(target); target 124 arch/xtensa/kernel/ptrace.c struct thread_info *ti = task_thread_info(target); target 151 arch/xtensa/kernel/ptrace.c static int tie_set(struct task_struct *target, target 157 arch/xtensa/kernel/ptrace.c struct 
pt_regs *regs = task_pt_regs(target); target 158 arch/xtensa/kernel/ptrace.c struct thread_info *ti = task_thread_info(target); target 60 block/badblocks.c sector_t target = s + sectors; target 66 block/badblocks.c target += (1<<bb->shift) - 1; target 67 block/badblocks.c target >>= bb->shift; target 68 block/badblocks.c sectors = target - s; target 90 block/badblocks.c if (a < target) target 106 block/badblocks.c if (BB_OFFSET(p[lo]) < target) { target 335 block/badblocks.c sector_t target = s + sectors; target 347 block/badblocks.c target >>= bb->shift; target 348 block/badblocks.c sectors = target - s; target 361 block/badblocks.c if (a < target) target 371 block/badblocks.c if ((BB_OFFSET(p[lo]) + BB_LEN(p[lo]) > target) && target 372 block/badblocks.c (BB_OFFSET(p[lo]) < target)) { target 389 block/badblocks.c p[lo] = BB_MAKE(target, end - target, ack); target 396 block/badblocks.c (BB_OFFSET(p[lo]) < target)) { target 1292 block/genhd.c int i, target; target 1298 block/genhd.c target = partno + 1; target 1299 block/genhd.c if (target < 0) target 1303 block/genhd.c if (disk_max_parts(disk) && target > disk_max_parts(disk)) target 1306 block/genhd.c if (target <= len) target 1309 block/genhd.c new_ptbl = kzalloc_node(struct_size(new_ptbl, part, target), GFP_KERNEL, target 1314 block/genhd.c new_ptbl->len = target; target 619 block/kyber-iosched.c u64 target, u64 latency) target 625 block/kyber-iosched.c divisor = max_t(u64, target >> KYBER_LATENCY_SHIFT, 1); target 640 block/kyber-iosched.c u64 target; target 647 block/kyber-iosched.c target = kqd->latency_targets[sched_domain]; target 649 block/kyber-iosched.c target, now - rq->start_time_ns); target 650 block/kyber-iosched.c add_latency_sample(cpu_latency, sched_domain, KYBER_IO_LATENCY, target, target 156 drivers/acpi/acpica/acdebug.h void acpi_db_decode_and_display_object(char *target, char *output_type); target 230 drivers/acpi/acpica/acinterp.h union acpi_operand_object *target, target 325 drivers/acpi/acpica/acmacros.h #define ACPI_INSERT_BITS(target, mask, source) \ target 326 drivers/acpi/acpica/acmacros.h target = ((target & (~(mask))) | (source & mask)) target 135 drivers/acpi/acpica/acstruct.h union acpi_operand_object *target; target 143 drivers/acpi/acpica/acstruct.h union acpi_operand_object *target; target 24 drivers/acpi/acpica/dbdisply.c static void *acpi_db_get_pointer(void *target); target 86 drivers/acpi/acpica/dbdisply.c static void *acpi_db_get_pointer(void *target) target 91 drivers/acpi/acpica/dbdisply.c address = strtoul(target, NULL, 16); target 139 drivers/acpi/acpica/dbdisply.c void acpi_db_decode_and_display_object(char *target, char *output_type) target 150 drivers/acpi/acpica/dbdisply.c if (!target) { target 172 drivers/acpi/acpica/dbdisply.c if ((target[0] >= 0x30) && (target[0] <= 0x39)) { target 173 drivers/acpi/acpica/dbdisply.c obj_ptr = acpi_db_get_pointer(target); target 263 drivers/acpi/acpica/dbdisply.c node = acpi_db_local_ns_lookup(target); target 269 drivers/acpi/acpica/exconfig.c union acpi_operand_object *target, target 447 drivers/acpi/acpica/exconfig.c status = acpi_ex_store(ddb_handle, target, walk_state); target 333 drivers/acpi/acpica/exdump.c u8 *target; target 357 drivers/acpi/acpica/exdump.c target = ACPI_ADD_PTR(u8, obj_desc, info->offset); target 374 drivers/acpi/acpica/exdump.c acpi_os_printf("%20s : %2.2X\n", name, *target); target 380 drivers/acpi/acpica/exdump.c ACPI_GET16(target)); target 386 drivers/acpi/acpica/exdump.c ACPI_GET32(target)); target 392 
drivers/acpi/acpica/exdump.c ACPI_FORMAT_UINT64(ACPI_GET64(target))); target 399 drivers/acpi/acpica/exdump.c *ACPI_CAST_PTR(void *, target)); target 439 drivers/acpi/acpica/exdump.c start = *ACPI_CAST_PTR(void *, target); target 475 drivers/acpi/acpica/exdump.c start = *ACPI_CAST_PTR(void *, target); target 511 drivers/acpi/acpica/exdump.c start = *ACPI_CAST_PTR(void *, target); target 547 drivers/acpi/acpica/exdump.c target); target 177 drivers/acpi/acpica/rsdump.c u8 *target = NULL; target 187 drivers/acpi/acpica/rsdump.c previous_target = target; target 188 drivers/acpi/acpica/rsdump.c target = ACPI_ADD_PTR(u8, resource, table->offset); target 211 drivers/acpi/acpica/rsdump.c acpi_rs_out_string(name, ACPI_CAST_PTR(char, target)); target 220 drivers/acpi/acpica/rsdump.c table->pointer[*target]); target 222 drivers/acpi/acpica/rsdump.c acpi_rs_out_integer8(name, ACPI_GET8(target)); target 228 drivers/acpi/acpica/rsdump.c acpi_rs_out_integer16(name, ACPI_GET16(target)); target 233 drivers/acpi/acpica/rsdump.c acpi_rs_out_integer32(name, ACPI_GET32(target)); target 238 drivers/acpi/acpica/rsdump.c acpi_rs_out_integer64(name, ACPI_GET64(target)); target 246 drivers/acpi/acpica/rsdump.c table->pointer[*target & 0x01]); target 252 drivers/acpi/acpica/rsdump.c table->pointer[*target & 0x03]); target 258 drivers/acpi/acpica/rsdump.c table->pointer[*target & 0x07]); target 269 drivers/acpi/acpica/rsdump.c target); target 283 drivers/acpi/acpica/rsdump.c (u8, target))); target 295 drivers/acpi/acpica/rsdump.c target); target 307 drivers/acpi/acpica/rsdump.c target)); target 319 drivers/acpi/acpica/rsdump.c (u16, target))); target 329 drivers/acpi/acpica/rsdump.c target)); target 339 drivers/acpi/acpica/rsdump.c target)); target 349 drivers/acpi/acpica/rsdump.c target)); target 359 drivers/acpi/acpica/rsdump.c target)); target 42 drivers/acpi/acpica/rsmisc.c char *target; target 143 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(void, aml, info->value); target 144 drivers/acpi/acpica/rsmisc.c item_count = ACPI_GET16(target) - ACPI_GET16(source); target 165 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(void, aml, (info->value + 2)); target 166 drivers/acpi/acpica/rsmisc.c if (ACPI_GET16(target)) { target 170 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(void, aml, info->value); target 172 drivers/acpi/acpica/rsmisc.c ACPI_GET16(target) - ACPI_GET16(source); target 227 drivers/acpi/acpica/rsmisc.c target = (char *)ACPI_ADD_PTR(void, resource, target 230 drivers/acpi/acpica/rsmisc.c *(u16 **)destination = ACPI_CAST_PTR(u16, target); target 235 drivers/acpi/acpica/rsmisc.c acpi_rs_move_data(target, source, item_count, target 243 drivers/acpi/acpica/rsmisc.c target = (char *)ACPI_ADD_PTR(void, resource, target 246 drivers/acpi/acpica/rsmisc.c *(u8 **)destination = ACPI_CAST_PTR(u8, target); target 251 drivers/acpi/acpica/rsmisc.c acpi_rs_move_data(target, source, item_count, target 259 drivers/acpi/acpica/rsmisc.c target = (char *)ACPI_ADD_PTR(void, resource, target 262 drivers/acpi/acpica/rsmisc.c *(u8 **)destination = ACPI_CAST_PTR(u8, target); target 267 drivers/acpi/acpica/rsmisc.c acpi_rs_move_data(target, source, item_count, target 275 drivers/acpi/acpica/rsmisc.c target = (char *)ACPI_ADD_PTR(void, resource, target 278 drivers/acpi/acpica/rsmisc.c *(u8 **)destination = ACPI_CAST_PTR(u8, target); target 285 drivers/acpi/acpica/rsmisc.c acpi_rs_move_data(target, source, item_count, target 296 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(char, resource, info->value); target 297 
drivers/acpi/acpica/rsmisc.c memcpy(destination, source, ACPI_GET16(target)); target 325 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(char, resource, target 336 drivers/acpi/acpica/rsmisc.c target); target 350 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(char, resource, info->value); target 351 drivers/acpi/acpica/rsmisc.c ACPI_SET8(target, item_count); target 366 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(char, resource, info->value); target 367 drivers/acpi/acpica/rsmisc.c ACPI_SET8(target, item_count); target 440 drivers/acpi/acpica/rsmisc.c char *target; target 534 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(void, aml, info->value); target 535 drivers/acpi/acpica/rsmisc.c ACPI_SET16(target, aml_length); target 559 drivers/acpi/acpica/rsmisc.c target = ACPI_ADD_PTR(void, aml, info->value); target 563 drivers/acpi/acpica/rsmisc.c ACPI_SET16(target, aml_length); target 112 drivers/acpi/acpica/tbfadt.c struct acpi_generic_address *target; target 713 drivers/acpi/acpica/tbfadt.c target, source64->space_id, target 80 drivers/acpi/hmat/hmat.c struct memory_target *target; target 82 drivers/acpi/hmat/hmat.c list_for_each_entry(target, &targets, node) target 83 drivers/acpi/hmat/hmat.c if (target->memory_pxm == mem_pxm) target 84 drivers/acpi/hmat/hmat.c return target; target 109 drivers/acpi/hmat/hmat.c struct memory_target *target; target 111 drivers/acpi/hmat/hmat.c target = find_mem_target(mem_pxm); target 112 drivers/acpi/hmat/hmat.c if (target) target 115 drivers/acpi/hmat/hmat.c target = kzalloc(sizeof(*target), GFP_KERNEL); target 116 drivers/acpi/hmat/hmat.c if (!target) target 119 drivers/acpi/hmat/hmat.c target->memory_pxm = mem_pxm; target 120 drivers/acpi/hmat/hmat.c target->processor_pxm = PXM_INVAL; target 121 drivers/acpi/hmat/hmat.c list_add_tail(&target->node, &targets); target 122 drivers/acpi/hmat/hmat.c INIT_LIST_HEAD(&target->caches); target 196 drivers/acpi/hmat/hmat.c static void hmat_update_target_access(struct memory_target *target, target 201 drivers/acpi/hmat/hmat.c target->hmem_attrs.read_latency = value; target 202 drivers/acpi/hmat/hmat.c target->hmem_attrs.write_latency = value; target 205 drivers/acpi/hmat/hmat.c target->hmem_attrs.read_latency = value; target 208 drivers/acpi/hmat/hmat.c target->hmem_attrs.write_latency = value; target 211 drivers/acpi/hmat/hmat.c target->hmem_attrs.read_bandwidth = value; target 212 drivers/acpi/hmat/hmat.c target->hmem_attrs.write_bandwidth = value; target 215 drivers/acpi/hmat/hmat.c target->hmem_attrs.read_bandwidth = value; target 218 drivers/acpi/hmat/hmat.c target->hmem_attrs.write_bandwidth = value; target 268 drivers/acpi/hmat/hmat.c struct memory_target *target; target 310 drivers/acpi/hmat/hmat.c target = find_mem_target(targs[targ]); target 311 drivers/acpi/hmat/hmat.c if (target && target->processor_pxm == inits[init]) target 312 drivers/acpi/hmat/hmat.c hmat_update_target_access(target, type, value); target 327 drivers/acpi/hmat/hmat.c struct memory_target *target; target 342 drivers/acpi/hmat/hmat.c target = find_mem_target(cache->memory_PD); target 343 drivers/acpi/hmat/hmat.c if (!target) target 381 drivers/acpi/hmat/hmat.c list_add_tail(&tcache->node, &target->caches); target 390 drivers/acpi/hmat/hmat.c struct memory_target *target = NULL; target 407 drivers/acpi/hmat/hmat.c target = find_mem_target(p->memory_PD); target 408 drivers/acpi/hmat/hmat.c if (!target) { target 413 drivers/acpi/hmat/hmat.c if (target && p->flags & ACPI_HMAT_PROCESSOR_PD_VALID) { target 420 
drivers/acpi/hmat/hmat.c target->processor_pxm = p_node; target 459 drivers/acpi/hmat/hmat.c static u32 hmat_initiator_perf(struct memory_target *target, target 484 drivers/acpi/hmat/hmat.c if (targs[i] == target->memory_pxm) { target 541 drivers/acpi/hmat/hmat.c static void hmat_register_target_initiators(struct memory_target *target) target 550 drivers/acpi/hmat/hmat.c mem_nid = pxm_to_node(target->memory_pxm); target 556 drivers/acpi/hmat/hmat.c if (target->processor_pxm != PXM_INVAL) { target 557 drivers/acpi/hmat/hmat.c cpu_nid = pxm_to_node(target->processor_pxm); target 585 drivers/acpi/hmat/hmat.c value = hmat_initiator_perf(target, initiator, loc->hmat_loc); target 592 drivers/acpi/hmat/hmat.c hmat_update_target_access(target, loc->hmat_loc->data_type, best); target 601 drivers/acpi/hmat/hmat.c static void hmat_register_target_cache(struct memory_target *target) target 603 drivers/acpi/hmat/hmat.c unsigned mem_nid = pxm_to_node(target->memory_pxm); target 606 drivers/acpi/hmat/hmat.c list_for_each_entry(tcache, &target->caches, node) target 610 drivers/acpi/hmat/hmat.c static void hmat_register_target_perf(struct memory_target *target) target 612 drivers/acpi/hmat/hmat.c unsigned mem_nid = pxm_to_node(target->memory_pxm); target 613 drivers/acpi/hmat/hmat.c node_set_perf_attrs(mem_nid, &target->hmem_attrs, 0); target 616 drivers/acpi/hmat/hmat.c static void hmat_register_target(struct memory_target *target) target 618 drivers/acpi/hmat/hmat.c int nid = pxm_to_node(target->memory_pxm); target 631 drivers/acpi/hmat/hmat.c if (!target->registered) { target 632 drivers/acpi/hmat/hmat.c hmat_register_target_initiators(target); target 633 drivers/acpi/hmat/hmat.c hmat_register_target_cache(target); target 634 drivers/acpi/hmat/hmat.c hmat_register_target_perf(target); target 635 drivers/acpi/hmat/hmat.c target->registered = true; target 642 drivers/acpi/hmat/hmat.c struct memory_target *target; target 644 drivers/acpi/hmat/hmat.c list_for_each_entry(target, &targets, node) target 645 drivers/acpi/hmat/hmat.c hmat_register_target(target); target 651 drivers/acpi/hmat/hmat.c struct memory_target *target; target 659 drivers/acpi/hmat/hmat.c target = find_mem_target(pxm); target 660 drivers/acpi/hmat/hmat.c if (!target) target 663 drivers/acpi/hmat/hmat.c hmat_register_target(target); target 674 drivers/acpi/hmat/hmat.c struct memory_target *target, *tnext; target 679 drivers/acpi/hmat/hmat.c list_for_each_entry_safe(target, tnext, &targets, node) { target 680 drivers/acpi/hmat/hmat.c list_for_each_entry_safe(tcache, cnext, &target->caches, node) { target 684 drivers/acpi/hmat/hmat.c list_del(&target->node); target 685 drivers/acpi/hmat/hmat.c kfree(target); target 2874 drivers/android/binder.c e->target_handle = tr->target.handle; target 2936 drivers/android/binder.c if (tr->target.handle) { target 2947 drivers/android/binder.c ref = binder_get_ref_olocked(proc, tr->target.handle, target 3625 drivers/android/binder.c uint32_t target; target 3631 drivers/android/binder.c if (get_user(target, (uint32_t __user *)ptr)) target 3636 drivers/android/binder.c if (increment && !target) { target 3648 drivers/android/binder.c proc, target, increment, strong, target 3650 drivers/android/binder.c if (!ret && rdata.desc != target) { target 3653 drivers/android/binder.c target, rdata.desc); target 3673 drivers/android/binder.c strong, target, ret); target 3854 drivers/android/binder.c uint32_t target; target 3859 drivers/android/binder.c if (get_user(target, (uint32_t __user *)ptr)) target 3886 
drivers/android/binder.c ref = binder_get_ref_olocked(proc, target, false); target 3893 drivers/android/binder.c target); target 4434 drivers/android/binder.c trd->target.ptr = target_node->ptr; target 4445 drivers/android/binder.c trd->target.ptr = 0; target 2133 drivers/ata/libata-core.c u32 target, target_limit; target 2139 drivers/ata/libata-core.c target = 1; target 2143 drivers/ata/libata-core.c target_limit = (1 << target) - 1; target 2155 drivers/ata/libata-core.c if (plink->sata_spd > target) { target 2157 drivers/ata/libata-core.c sata_spd_string(target)); target 3101 drivers/ata/libata-core.c u32 limit, target, spd; target 3113 drivers/ata/libata-core.c target = 0; target 3115 drivers/ata/libata-core.c target = fls(limit); target 3118 drivers/ata/libata-core.c *scontrol = (*scontrol & ~0xf0) | ((target & 0xf) << 4); target 3120 drivers/ata/libata-core.c return spd != target; target 117 drivers/base/core.c static int device_is_dependent(struct device *dev, void *target) target 122 drivers/base/core.c if (dev == target) target 125 drivers/base/core.c ret = device_for_each_child(dev, target, device_is_dependent); target 130 drivers/base/core.c if (link->consumer == target) target 133 drivers/base/core.c ret = device_is_dependent(link->consumer, target); target 701 drivers/base/devres.c struct action_devres *target = p; target 703 drivers/base/devres.c return devres->action == target->action && target 704 drivers/base/devres.c devres->data == target->data; target 986 drivers/base/devres.c struct pages_devres *target = p; target 988 drivers/base/devres.c return devres->addr == target->addr; target 698 drivers/base/node.c struct node_access_nodes *initiator, *target; target 707 drivers/base/node.c target = node_init_node_access(targ_node, access); target 708 drivers/base/node.c if (!initiator || !target) target 717 drivers/base/node.c ret = sysfs_add_link_to_group(&target->dev.kobj, "initiators", target 202 drivers/bus/mvebu-mbus.c u32 *size, u8 *target, u8 *attr, target 220 drivers/bus/mvebu-mbus.c if (target) target 221 drivers/bus/mvebu-mbus.c *target = (ctrlreg & WIN_CTRL_TGT_MASK) >> WIN_CTRL_TGT_SHIFT; target 273 drivers/bus/mvebu-mbus.c u8 target, u8 attr) target 330 drivers/bus/mvebu-mbus.c phys_addr_t remap, u8 target, target 350 drivers/bus/mvebu-mbus.c (target << WIN_CTRL_TGT_SHIFT) | target 375 drivers/bus/mvebu-mbus.c phys_addr_t remap, u8 target, target 388 drivers/bus/mvebu-mbus.c target, attr); target 400 drivers/bus/mvebu-mbus.c remap, target, attr); target 902 drivers/bus/mvebu-mbus.c int mvebu_mbus_add_window_remap_by_id(unsigned int target, target 909 drivers/bus/mvebu-mbus.c if (!mvebu_mbus_window_conflicts(s, base, size, target, attribute)) { target 911 drivers/bus/mvebu-mbus.c target, attribute); target 915 drivers/bus/mvebu-mbus.c return mvebu_mbus_alloc_window(s, base, size, remap, target, attribute); target 918 drivers/bus/mvebu-mbus.c int mvebu_mbus_add_window_by_id(unsigned int target, unsigned int attribute, target 921 drivers/bus/mvebu-mbus.c return mvebu_mbus_add_window_remap_by_id(target, attribute, base, target 951 drivers/bus/mvebu-mbus.c int mvebu_mbus_get_dram_win_info(phys_addr_t phyaddr, u8 *target, u8 *attr) target 969 drivers/bus/mvebu-mbus.c *target = dram->mbus_dram_target_id; target 980 drivers/bus/mvebu-mbus.c int mvebu_mbus_get_io_win_info(phys_addr_t phyaddr, u32 *size, u8 *target, target 990 drivers/bus/mvebu-mbus.c size, target, attr, NULL); target 1185 drivers/bus/mvebu-mbus.c u8 target, u8 attr) target 1187 drivers/bus/mvebu-mbus.c if 
(!mvebu_mbus_window_conflicts(mbus, base, size, target, attr)) { target 1189 drivers/bus/mvebu-mbus.c target, attr); target 1194 drivers/bus/mvebu-mbus.c target, attr)) { target 1196 drivers/bus/mvebu-mbus.c target, attr); target 1253 drivers/bus/mvebu-mbus.c u8 target, attr; target 1263 drivers/bus/mvebu-mbus.c target = TARGET(windowid); target 1269 drivers/bus/mvebu-mbus.c ret = mbus_dt_setup_win(mbus, base, size, target, attr); target 83 drivers/char/agp/isoch.c struct isoch_data *master, target; target 124 drivers/char/agp/isoch.c target.maxbw = (tnistat >> 16) & 0xff; target 125 drivers/char/agp/isoch.c target.n = (tnistat >> 8) & 0xff; target 126 drivers/char/agp/isoch.c target.y = (tnistat >> 6) & 0x3; target 127 drivers/char/agp/isoch.c target.l = (tnistat >> 3) & 0x7; target 128 drivers/char/agp/isoch.c target.rq = (tstatus >> 24) & 0xff; target 130 drivers/char/agp/isoch.c y_max = target.y; target 157 drivers/char/agp/isoch.c if (tot_bw > target.maxbw) { target 165 drivers/char/agp/isoch.c target.y = y_max; target 175 drivers/char/agp/isoch.c tnicmd |= target.y << 6; target 180 drivers/char/agp/isoch.c target.n = (tnistat >> 8) & 0xff; target 184 drivers/char/agp/isoch.c master[cdev].y = target.y; target 192 drivers/char/agp/isoch.c if (tot_n > target.n) { target 203 drivers/char/agp/isoch.c rem = target.n - tot_n; target 228 drivers/char/agp/isoch.c rq_isoch = (target.y > 0x1) ? target.n * (1 << (target.y - 1)) : target.n; target 229 drivers/char/agp/isoch.c rq_async = target.rq - rq_isoch; target 3867 drivers/clk/clk.c struct clk_core *target) target 3873 drivers/clk/clk.c if (root->parents[i].core == target) target 3877 drivers/clk/clk.c clk_core_evict_parent_cache_subtree(child, target); target 412 drivers/cpufreq/cppc_cpufreq.c .target = cppc_cpufreq_set_target, target 371 drivers/cpufreq/cpufreq-nforce2.c .target = nforce2_target, target 67 drivers/cpufreq/cpufreq.c return cpufreq_driver->target_index || cpufreq_driver->target; target 2141 drivers/cpufreq/cpufreq.c if (cpufreq_driver->target) target 2142 drivers/cpufreq/cpufreq.c return cpufreq_driver->target(policy, target_freq, relation); target 2648 drivers/cpufreq/cpufreq.c driver_data->target) || target 2650 drivers/cpufreq/cpufreq.c driver_data->target)) || target 439 drivers/cpufreq/gx-suspmod.c .target = cpufreq_gx_target, target 1761 drivers/cpufreq/intel_pstate.c int target, avg_pstate; target 1771 drivers/cpufreq/intel_pstate.c target = global.no_turbo || global.turbo_disabled ? 
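The drivers/cpufreq entries just above (cppc_cpufreq, cpufreq-nforce2, gx-suspmod, and the core's "if (cpufreq_driver->target) return cpufreq_driver->target(policy, target_freq, relation)") all revolve around the same ".target" callback idiom. The following is only an illustrative, self-contained sketch of that dispatch pattern; demo_policy, demo_cpufreq_driver and demo_set_target are hypothetical stand-ins and not kernel symbols, only the shape mirrors the listed code.

    /* Sketch of the ".target" callback dispatch pattern seen in the
     * drivers/cpufreq entries above; all names here are hypothetical. */
    #include <stdio.h>

    struct demo_policy { unsigned int cur; };

    struct demo_cpufreq_driver {
            /* older-style callback: the driver picks and programs the frequency */
            int (*target)(struct demo_policy *policy, unsigned int target_freq,
                          unsigned int relation);
    };

    static int demo_set_target(struct demo_policy *policy,
                               unsigned int target_freq, unsigned int relation)
    {
            policy->cur = target_freq;   /* a real driver would program hardware here */
            return 0;
    }

    static struct demo_cpufreq_driver demo_driver = {
            .target = demo_set_target,   /* same idiom as ".target = cppc_cpufreq_set_target" */
    };

    /* mirrors the core's "if (cpufreq_driver->target) return cpufreq_driver->target(...)" */
    static int demo_target(struct demo_policy *policy, unsigned int freq,
                           unsigned int rel)
    {
            if (demo_driver.target)
                    return demo_driver.target(policy, freq, rel);
            return -1;
    }

    int main(void)
    {
            struct demo_policy p = { .cur = 0 };
            demo_target(&p, 1200000, 0);
            printf("cur=%u kHz\n", p.cur);
            return 0;
    }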
target 1773 drivers/cpufreq/intel_pstate.c target += target >> 2; target 1774 drivers/cpufreq/intel_pstate.c target = mul_fp(target, busy_frac); target 1775 drivers/cpufreq/intel_pstate.c if (target < cpu->pstate.min_pstate) target 1776 drivers/cpufreq/intel_pstate.c target = cpu->pstate.min_pstate; target 1786 drivers/cpufreq/intel_pstate.c if (avg_pstate > target) target 1787 drivers/cpufreq/intel_pstate.c target += (avg_pstate - target) >> 1; target 1789 drivers/cpufreq/intel_pstate.c return target; target 2457 drivers/cpufreq/intel_pstate.c .target = intel_cpufreq_target, target 573 drivers/cpufreq/pcc-cpufreq.c .target = pcc_cpufreq_target, target 92 drivers/cpufreq/s3c24xx-cpufreq.c static inline int closer(unsigned int target, unsigned int n, unsigned int c) target 94 drivers/cpufreq/s3c24xx-cpufreq.c int diff_cur = abs(target - c); target 95 drivers/cpufreq/s3c24xx-cpufreq.c int diff_new = abs(target - n); target 428 drivers/cpufreq/s3c24xx-cpufreq.c .target = s3c_cpufreq_target, target 45 drivers/cpufreq/sh-cpufreq.c struct cpufreq_target *target = arg; target 46 drivers/cpufreq/sh-cpufreq.c struct cpufreq_policy *policy = target->policy; target 59 drivers/cpufreq/sh-cpufreq.c freq = clk_round_rate(cpuclk, target->freq * 1000); target 64 drivers/cpufreq/sh-cpufreq.c dev_dbg(dev, "requested frequency %u Hz\n", target->freq * 1000); target 70 drivers/cpufreq/sh-cpufreq.c cpufreq_freq_transition_begin(target->policy, &freqs); target 72 drivers/cpufreq/sh-cpufreq.c cpufreq_freq_transition_end(target->policy, &freqs, 0); target 163 drivers/cpufreq/sh-cpufreq.c .target = sh_cpufreq_target, target 65 drivers/cpufreq/unicore2-cpufreq.c .target = ucv2_target, target 465 drivers/cpuidle/governors/menu.c struct cpuidle_state *target = &drv->states[last_idx]; target 510 drivers/cpuidle/governors/menu.c if (measured_us > 2 * target->exit_latency) target 511 drivers/cpuidle/governors/menu.c measured_us -= target->exit_latency; target 1002 drivers/crypto/axis/artpec6_crypto.c unsigned int mod, target, diff, pad_bytes, size_bytes; target 1010 drivers/crypto/axis/artpec6_crypto.c target = 448 / 8; target 1015 drivers/crypto/axis/artpec6_crypto.c target = 896 / 8; target 1021 drivers/crypto/axis/artpec6_crypto.c target -= 1; target 1023 drivers/crypto/axis/artpec6_crypto.c pad_bytes = diff > target ? 
target + mod - diff : target - diff; target 1406 drivers/crypto/chelsio/chtls/chtls_io.c int target; target 1412 drivers/crypto/chelsio/chtls/chtls_io.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 1440 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target && !sk->sk_backlog.tail) target 1480 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target) target 1496 drivers/crypto/chelsio/chtls/chtls_io.c if (copied < target) { target 1548 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target && target 1693 drivers/crypto/chelsio/chtls/chtls_io.c int target; /* Read at least this many bytes */ target 1717 drivers/crypto/chelsio/chtls/chtls_io.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 1746 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target && !sk->sk_backlog.tail) target 1784 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target) target 1798 drivers/crypto/chelsio/chtls/chtls_io.c if (copied < target) target 1848 drivers/crypto/chelsio/chtls/chtls_io.c if (copied >= target && target 598 drivers/crypto/ixp4xx_crypto.c static int register_chain_var(struct crypto_tfm *tfm, u8 xpad, u32 target, target 638 drivers/crypto/ixp4xx_crypto.c crypt->icv_rev_aes = target; target 405 drivers/crypto/nx/nx-842-powernv.c ret = setup_ddl(&crb->target, wmem->ddl_out, target 305 drivers/devfreq/devfreq.c err = devfreq->profile->target(devfreq->dev.parent, &new_freq, flags); target 339 drivers/devfreq/exynos-bus.c profile->target = exynos_bus_target; target 387 drivers/devfreq/exynos-bus.c profile->target = exynos_bus_target; target 108 drivers/devfreq/governor_passive.c ret = devfreq->profile->target(devfreq->dev.parent, &freq, 0); target 192 drivers/devfreq/rk3399_dmc.c .target = rk3399_dmcfreq_target, target 96 drivers/devfreq/tegra20-devfreq.c .target = tegra_devfreq_target, target 515 drivers/devfreq/tegra30-devfreq.c .target = tegra_devfreq_target, target 489 drivers/dma/mv_xor.c u8 target, attr; target 515 drivers/dma/mv_xor.c ret = mvebu_mbus_get_io_win_info(addr, &size, &target, &attr); target 538 drivers/dma/mv_xor.c writel((addr & 0xffff0000) | (attr << 8) | target, target 267 drivers/dma/ste_dma40_ll.c dma_addr_t target, target 281 drivers/dma/ste_dma40_ll.c if (!target) target 287 drivers/dma/ste_dma40_ll.c dma_addr_t dst = target ?: sg_addr; target 443 drivers/dma/ste_dma40_ll.h dma_addr_t target, target 1534 drivers/edac/sb_edac.c int target; target 1537 drivers/edac/sb_edac.c for (target = 0; target < KNL_MAX_CHANNELS; target++) { target 1539 drivers/edac/sb_edac.c if (knl_get_mc_route(target, target 121 drivers/firmware/efi/libstub/random.c efi_physical_addr_t target; target 129 drivers/firmware/efi/libstub/random.c target = round_up(md->phys_addr, align) + target_slot * align; target 133 drivers/firmware/efi/libstub/random.c EFI_LOADER_DATA, pages, &target); target 135 drivers/firmware/efi/libstub/random.c *addr = target; target 493 drivers/firmware/qemu_fw_cfg.c struct kobject *target, const char *name) target 500 drivers/firmware/qemu_fw_cfg.c if (!dir || !target || !name || !*name) target 513 drivers/firmware/qemu_fw_cfg.c ret = sysfs_create_link(&dir->kobj, target, tok); target 368 drivers/fpga/of-fpga-region.c region = of_fpga_region_find(nd->target); target 151 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c uint32_t page_flip_flags, uint32_t target, target 221 drivers/gpu/drm/amd/amdgpu/amdgpu_display.c work->target_vblank = target - (uint32_t)drm_crtc_vblank_count(crtc) + target 632 drivers/gpu/drm/amd/amdgpu/amdgpu_mode.h 
uint32_t page_flip_flags, uint32_t target, target 712 drivers/gpu/drm/amd/amdgpu/atom.c int execute = 0, target = U16(*ptr); target 741 drivers/gpu/drm/amd/amdgpu/atom.c SDEBUG(" target: 0x%04X\n", target); target 743 drivers/gpu/drm/amd/amdgpu/atom.c if (ctx->last_jump == (ctx->start + target)) { target 756 drivers/gpu/drm/amd/amdgpu/atom.c ctx->last_jump = ctx->start + target; target 759 drivers/gpu/drm/amd/amdgpu/atom.c *ptr = ctx->start + target; target 1004 drivers/gpu/drm/amd/amdgpu/atom.c uint32_t src, val, target; target 1014 drivers/gpu/drm/amd/amdgpu/atom.c target = U16(*ptr); target 1016 drivers/gpu/drm/amd/amdgpu/atom.c SDEBUG(" target: %04X\n", target); target 1017 drivers/gpu/drm/amd/amdgpu/atom.c *ptr = ctx->start + target; target 5119 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t tmp, target; target 5123 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c target = SOC15_REG_OFFSET(GC, 0, mmCP_ME1_PIPE0_INT_CNTL); target 5125 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c target = SOC15_REG_OFFSET(GC, 0, mmCP_ME2_PIPE0_INT_CNTL); target 5126 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c target += ring->pipe; target 5136 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c tmp = RREG32(target); target 5139 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c WREG32(target, tmp); target 5146 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c tmp = RREG32(target); target 5149 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c WREG32(target, tmp); target 197 drivers/gpu/drm/arm/display/komeda/komeda_dev.h komeda_product_match(struct komeda_dev *mdev, u32 target) target 199 drivers/gpu/drm/arm/display/komeda/komeda_dev.h return MALIDP_CORE_ID_PRODUCT_ID(mdev->chip.core_id) == target; target 3397 drivers/gpu/drm/drm_atomic_helper.c uint32_t target, target 3420 drivers/gpu/drm/drm_atomic_helper.c crtc_state->target_vblank = target; target 279 drivers/gpu/drm/etnaviv/etnaviv_buffer.c u32 dwords, target; target 288 drivers/gpu/drm/etnaviv/etnaviv_buffer.c target = etnaviv_buffer_reserve(gpu, buffer, dwords); target 310 drivers/gpu/drm/etnaviv/etnaviv_buffer.c target); target 342 drivers/gpu/drm/etnaviv/etnaviv_buffer.c u32 target, extra_dwords; target 363 drivers/gpu/drm/etnaviv/etnaviv_buffer.c target = etnaviv_buffer_reserve(gpu, buffer, extra_dwords); target 425 drivers/gpu/drm/etnaviv/etnaviv_buffer.c link_target = target; target 23 drivers/gpu/drm/gma500/cdv_intel_display.c struct drm_crtc *crtc, int target, target 401 drivers/gpu/drm/gma500/cdv_intel_display.c struct drm_crtc *crtc, int target, target 410 drivers/gpu/drm/gma500/cdv_intel_display.c if (target < 200000) { target 426 drivers/gpu/drm/gma500/cdv_intel_display.c if (target < 200000) { target 700 drivers/gpu/drm/gma500/gma_display.c struct drm_crtc *crtc, int target, int refclk, target 707 drivers/gpu/drm/gma500/gma_display.c int err = target; target 723 drivers/gpu/drm/gma500/gma_display.c if (target < limit->p2.dot_limit) target 749 drivers/gpu/drm/gma500/gma_display.c this_err = abs(clock.dot - target); target 759 drivers/gpu/drm/gma500/gma_display.c return err != target; target 43 drivers/gpu/drm/gma500/gma_display.h int target, int refclk, target 91 drivers/gpu/drm/gma500/gma_display.h struct drm_crtc *crtc, int target, int refclk, target 630 drivers/gpu/drm/gma500/mdfld_intel_display.c mdfldFindBestPLL(struct drm_crtc *crtc, int target, int refclk, target 635 drivers/gpu/drm/gma500/mdfld_intel_display.c int err = target; target 646 drivers/gpu/drm/gma500/mdfld_intel_display.c this_err = abs(clock.dot - target); target 653 drivers/gpu/drm/gma500/mdfld_intel_display.c return err != target; 
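The gma500/mdfld "FindBestPLL" entries above ("int err = target", "this_err = abs(clock.dot - target)", "return err != target") share one brute-force pattern: sweep the divider space and keep the candidate whose computed dot clock is closest to the target. The sketch below is only an illustration of that pattern; the divider ranges and the dot-clock formula are made-up placeholders, not values from any of the listed drivers.

    /* Sketch of the "minimise |computed dot clock - target|" search used by
     * the FindBestPLL routines listed above; ranges and formula are invented. */
    #include <stdio.h>
    #include <stdlib.h>

    struct demo_clock { int n, m, p, dot; };

    static int demo_find_best_pll(int target, int refclk, struct demo_clock *best)
    {
            struct demo_clock clock;
            int err = target;                    /* same "err starts at target" idiom */

            for (clock.n = 1; clock.n <= 8; clock.n++) {
                    for (clock.m = 60; clock.m <= 120; clock.m++) {
                            for (clock.p = 1; clock.p <= 16; clock.p++) {
                                    int this_err;

                                    clock.dot = refclk * clock.m / clock.n / clock.p;
                                    this_err = abs(clock.dot - target);
                                    if (this_err < err) {   /* keep the closest candidate */
                                            *best = clock;
                                            err = this_err;
                                    }
                            }
                    }
            }
            return err != target;                /* non-zero iff a candidate was found */
    }

    int main(void)
    {
            struct demo_clock best = { 0 };

            if (demo_find_best_pll(96000, 27000, &best))
                    printf("n=%d m=%d p=%d dot=%d\n", best.n, best.m, best.p, best.dot);
            return 0;
    }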
target 37 drivers/gpu/drm/gma500/oaktrail_crtc.c struct drm_crtc *crtc, int target, target 41 drivers/gpu/drm/gma500/oaktrail_crtc.c struct drm_crtc *crtc, int target, target 124 drivers/gpu/drm/gma500/oaktrail_crtc.c struct drm_crtc *crtc, int target, target 141 drivers/gpu/drm/gma500/oaktrail_crtc.c target_vco = target * clock.p; target 153 drivers/gpu/drm/gma500/oaktrail_crtc.c ((target * 10000) / actual_freq); target 182 drivers/gpu/drm/gma500/oaktrail_crtc.c struct drm_crtc *crtc, int target, target 186 drivers/gpu/drm/gma500/oaktrail_crtc.c int err = target; target 198 drivers/gpu/drm/gma500/oaktrail_crtc.c this_err = abs(clock.dot - target); target 205 drivers/gpu/drm/gma500/oaktrail_crtc.c return err != target; target 176 drivers/gpu/drm/gma500/oaktrail_hdmi.c static void oaktrail_hdmi_find_dpll(struct drm_crtc *crtc, int target, target 182 drivers/gpu/drm/gma500/oaktrail_hdmi.c np_min = DIV_ROUND_UP(oaktrail_hdmi_limit.vco.min, target * 10); target 183 drivers/gpu/drm/gma500/oaktrail_hdmi.c np_max = oaktrail_hdmi_limit.vco.max / (target * 10); target 189 drivers/gpu/drm/gma500/oaktrail_hdmi.c nr_min = DIV_ROUND_UP((refclk * 1000), (target * 10 * np_max)); target 190 drivers/gpu/drm/gma500/oaktrail_hdmi.c nr_max = DIV_ROUND_UP((refclk * 1000), (target * 10 * np_min)); target 196 drivers/gpu/drm/gma500/oaktrail_hdmi.c np = DIV_ROUND_UP((refclk * 1000), (target * 10 * nr_max)); target 197 drivers/gpu/drm/gma500/oaktrail_hdmi.c nr = DIV_ROUND_UP((refclk * 1000), (target * 10 * np)); target 198 drivers/gpu/drm/gma500/oaktrail_hdmi.c nf = DIV_ROUND_CLOSEST((target * 10 * np * nr), refclk); target 633 drivers/gpu/drm/i915/display/intel_display.c int target) target 648 drivers/gpu/drm/i915/display/intel_display.c if (target < limit->p2.dot_limit) target 668 drivers/gpu/drm/i915/display/intel_display.c int target, int refclk, struct dpll *match_clock, target 673 drivers/gpu/drm/i915/display/intel_display.c int err = target; target 677 drivers/gpu/drm/i915/display/intel_display.c clock.p2 = i9xx_select_p2_div(limit, crtc_state, target); target 700 drivers/gpu/drm/i915/display/intel_display.c this_err = abs(clock.dot - target); target 710 drivers/gpu/drm/i915/display/intel_display.c return (err != target); target 726 drivers/gpu/drm/i915/display/intel_display.c int target, int refclk, struct dpll *match_clock, target 731 drivers/gpu/drm/i915/display/intel_display.c int err = target; target 735 drivers/gpu/drm/i915/display/intel_display.c clock.p2 = i9xx_select_p2_div(limit, crtc_state, target); target 756 drivers/gpu/drm/i915/display/intel_display.c this_err = abs(clock.dot - target); target 766 drivers/gpu/drm/i915/display/intel_display.c return (err != target); target 782 drivers/gpu/drm/i915/display/intel_display.c int target, int refclk, struct dpll *match_clock, target 790 drivers/gpu/drm/i915/display/intel_display.c int err_most = (target >> 8) + (target >> 9); target 794 drivers/gpu/drm/i915/display/intel_display.c clock.p2 = i9xx_select_p2_div(limit, crtc_state, target); target 814 drivers/gpu/drm/i915/display/intel_display.c this_err = abs(clock.dot - target); target 876 drivers/gpu/drm/i915/display/intel_display.c int target, int refclk, struct dpll *match_clock, target 887 drivers/gpu/drm/i915/display/intel_display.c target *= 5; /* fast clock */ target 901 drivers/gpu/drm/i915/display/intel_display.c clock.m2 = DIV_ROUND_CLOSEST(target * clock.p * clock.n, target 911 drivers/gpu/drm/i915/display/intel_display.c if (!vlv_PLL_is_optimal(dev, target, target 936 
drivers/gpu/drm/i915/display/intel_display.c int target, int refclk, struct dpll *match_clock, target 955 drivers/gpu/drm/i915/display/intel_display.c target *= 5; /* fast clock */ target 965 drivers/gpu/drm/i915/display/intel_display.c m2 = DIV_ROUND_CLOSEST_ULL(mul_u32_u32(target, clock.p * clock.n) << 22, target 978 drivers/gpu/drm/i915/display/intel_display.c if (!vlv_PLL_is_optimal(dev, target, &clock, best_clock, target 287 drivers/gpu/drm/i915/display/intel_panel.c static inline u32 panel_fitter_scaling(u32 source, u32 target) target 296 drivers/gpu/drm/i915/display/intel_panel.c u32 ratio = source * FACTOR / target; target 892 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c const struct i915_vma *target) target 894 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c return gen8_canonical_addr((int)reloc->delta + target->node.start); target 1262 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c const struct i915_vma *target) target 1265 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c u64 target_offset = relocation_target(reloc, target); target 1345 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c return target->node.start | UPDATE; target 1353 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c struct i915_vma *target; target 1357 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c target = eb_get_vma(eb, reloc->target_handle); target 1358 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c if (unlikely(!target)) target 1385 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c *target->exec_flags |= EXEC_OBJECT_WRITE; target 1395 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c err = i915_vma_bind(target, target->obj->cache_level, target 1408 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c gen8_canonical_addr(target->node.start) == reloc->presumed_offset) target 1440 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c return relocate_entry(vma, reloc, eb, target); target 147 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c unsigned long target, target 179 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c trace_i915_gem_shrink(i915, target, shrink); target 229 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c while (count < target && target 16 drivers/gpu/drm/i915/gem/i915_gem_shrinker.h unsigned long target, target 40 drivers/gpu/drm/i915/gem/i915_gem_throttle.c struct i915_request *request, *target = NULL; target 53 drivers/gpu/drm/i915/gem/i915_gem_throttle.c if (target) { target 54 drivers/gpu/drm/i915/gem/i915_gem_throttle.c list_del(&target->client_link); target 55 drivers/gpu/drm/i915/gem/i915_gem_throttle.c target->file_priv = NULL; target 58 drivers/gpu/drm/i915/gem/i915_gem_throttle.c target = request; target 60 drivers/gpu/drm/i915/gem/i915_gem_throttle.c if (target) target 61 drivers/gpu/drm/i915/gem/i915_gem_throttle.c i915_request_get(target); target 64 drivers/gpu/drm/i915/gem/i915_gem_throttle.c if (!target) target 67 drivers/gpu/drm/i915/gem/i915_gem_throttle.c ret = i915_request_wait(target, target 70 drivers/gpu/drm/i915/gem/i915_gem_throttle.c i915_request_put(target); target 1873 drivers/gpu/drm/i915/gt/intel_ringbuffer.c struct i915_request *target; target 1880 drivers/gpu/drm/i915/gt/intel_ringbuffer.c list_for_each_entry(target, &tl->requests, link) { target 1881 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (target->ring != ring) target 1885 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (bytes <= __intel_ring_space(target->postfix, target 1890 drivers/gpu/drm/i915/gt/intel_ringbuffer.c if (GEM_WARN_ON(&target->link == &tl->requests)) target 1893 drivers/gpu/drm/i915/gt/intel_ringbuffer.c timeout = 
i915_request_wait(target, target 1899 drivers/gpu/drm/i915/gt/intel_ringbuffer.c i915_request_retire_upto(target); target 261 drivers/gpu/drm/i915/i915_gem_evict.c struct drm_mm_node *target, target 266 drivers/gpu/drm/i915/i915_gem_evict.c u64 start = target->start; target 267 drivers/gpu/drm/i915/i915_gem_evict.c u64 end = start + target->size; target 276 drivers/gpu/drm/i915/i915_gem_evict.c trace_i915_gem_evict_node(vm, target, flags); target 314 drivers/gpu/drm/i915/i915_gem_evict.c if (node->start + node->size == target->start) { target 315 drivers/gpu/drm/i915/i915_gem_evict.c if (node->color == target->color) target 318 drivers/gpu/drm/i915/i915_gem_evict.c if (node->start == target->start + target->size) { target 319 drivers/gpu/drm/i915/i915_gem_evict.c if (node->color == target->color) target 1002 drivers/gpu/drm/i915/i915_pmu.c unsigned int target; target 1007 drivers/gpu/drm/i915/i915_pmu.c target = cpumask_any_but(topology_sibling_cpumask(cpu), cpu); target 1009 drivers/gpu/drm/i915/i915_pmu.c if (target < nr_cpu_ids) { target 1010 drivers/gpu/drm/i915/i915_pmu.c cpumask_set_cpu(target, &i915_pmu_cpumask); target 1011 drivers/gpu/drm/i915/i915_pmu.c perf_pmu_migrate_context(&pmu->base, cpu, target); target 435 drivers/gpu/drm/i915/i915_trace.h TP_PROTO(struct drm_i915_private *i915, unsigned long target, unsigned flags), target 436 drivers/gpu/drm/i915/i915_trace.h TP_ARGS(i915, target, flags), target 440 drivers/gpu/drm/i915/i915_trace.h __field(unsigned long, target) target 446 drivers/gpu/drm/i915/i915_trace.h __entry->target = target; target 451 drivers/gpu/drm/i915/i915_trace.h __entry->dev, __entry->target, __entry->flags) target 219 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_mm_node target = { target 233 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); target 243 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); target 267 drivers/gpu/drm/i915/selftests/i915_gem_evict.c struct drm_mm_node target = { target 320 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); target 329 drivers/gpu/drm/i915/selftests/i915_gem_evict.c target.color = I915_CACHE_L3_LLC; target 331 drivers/gpu/drm/i915/selftests/i915_gem_evict.c err = i915_gem_evict_for_node(&ggtt->vm, &target, 0); target 571 drivers/gpu/drm/i915/selftests/i915_sw_fence.c unsigned long target, delay; target 575 drivers/gpu/drm/i915/selftests/i915_sw_fence.c timed_fence_init(&tf, target = jiffies); target 585 drivers/gpu/drm/i915/selftests/i915_sw_fence.c timed_fence_init(&tf, target = jiffies + delay); target 599 drivers/gpu/drm/i915/selftests/i915_sw_fence.c if (time_before(jiffies, target)) { target 601 drivers/gpu/drm/i915/selftests/i915_sw_fence.c target, jiffies); target 967 drivers/gpu/drm/meson/meson_vclk.c void meson_vclk_setup(struct meson_drm *priv, unsigned int target, target 976 drivers/gpu/drm/meson/meson_vclk.c if (target == MESON_VCLK_TARGET_CVBS) { target 979 drivers/gpu/drm/meson/meson_vclk.c } else if (target == MESON_VCLK_TARGET_DMT) { target 30 drivers/gpu/drm/meson/meson_vclk.h void meson_vclk_setup(struct meson_drm *priv, unsigned int target, target 56 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c static inline int CRASHDUMP_READ(u64 *in, u32 reg, u32 dwords, u64 target) target 58 drivers/gpu/drm/msm/adreno/a6xx_gpu_state.c in[0] = target; target 80 drivers/gpu/drm/msm/msm_gpu.c .target = msm_devfreq_target, target 175 
drivers/gpu/drm/nouveau/dispnv50/disp.c .target = NV_DMA_V0_TARGET_VRAM, target 186 drivers/gpu/drm/nouveau/dispnv50/disp.c .target = NV_DMA_V0_TARGET_VRAM, target 68 drivers/gpu/drm/nouveau/dispnv50/wndw.c args.base.target = NV_DMA_V0_TARGET_VRAM; target 12 drivers/gpu/drm/nouveau/include/nvif/cl0002.h __u8 target; target 30 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h enum nvkm_memory_target (*target)(struct nvkm_memory *); target 58 drivers/gpu/drm/nouveau/include/nvkm/core/memory.h #define nvkm_memory_target(p) (p)->func->target(p) target 13 drivers/gpu/drm/nouveau/include/nvkm/engine/dma.h u32 target; target 139 drivers/gpu/drm/nouveau/include/nvkm/subdev/fb.h struct nvkm_ram_data target; target 553 drivers/gpu/drm/nouveau/nouveau_abi16.c args.target = NV_DMA_V0_TARGET_VM; target 559 drivers/gpu/drm/nouveau/nouveau_abi16.c args.target = NV_DMA_V0_TARGET_AGP; target 564 drivers/gpu/drm/nouveau/nouveau_abi16.c args.target = NV_DMA_V0_TARGET_VM; target 129 drivers/gpu/drm/nouveau/nouveau_chan.c u32 target; target 142 drivers/gpu/drm/nouveau/nouveau_chan.c target = TTM_PL_FLAG_TT | TTM_PL_FLAG_UNCACHED; target 144 drivers/gpu/drm/nouveau/nouveau_chan.c target = TTM_PL_FLAG_VRAM; target 146 drivers/gpu/drm/nouveau/nouveau_chan.c ret = nouveau_bo_new(cli, size, 0, target, 0, 0, NULL, NULL, target 149 drivers/gpu/drm/nouveau/nouveau_chan.c ret = nouveau_bo_pin(chan->push.buffer, target, false); target 178 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VM; target 189 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_PCI; target 195 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VRAM; target 202 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_AGP; target 208 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VM; target 380 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VM; target 385 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VRAM; target 397 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VM; target 403 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_AGP; target 409 drivers/gpu/drm/nouveau/nouveau_chan.c args.target = NV_DMA_V0_TARGET_VM; target 400 drivers/gpu/drm/nouveau/nouveau_drm.c .target = NV_DMA_V0_TARGET_VRAM, target 114 drivers/gpu/drm/nouveau/nouveau_svm.c unsigned target, cmd, priority; target 144 drivers/gpu/drm/nouveau/nouveau_svm.c target = args->header >> NOUVEAU_SVM_BIND_TARGET_SHIFT; target 145 drivers/gpu/drm/nouveau/nouveau_svm.c target &= NOUVEAU_SVM_BIND_TARGET_MASK; target 146 drivers/gpu/drm/nouveau/nouveau_svm.c switch (target) { target 95 drivers/gpu/drm/nouveau/nv17_fence.c .target = NV_DMA_V0_TARGET_VRAM, target 56 drivers/gpu/drm/nouveau/nv50_fence.c .target = NV_DMA_V0_TARGET_VRAM, target 137 drivers/gpu/drm/nouveau/nvkm/core/memory.c nvkm_memory_new(struct nvkm_device *device, enum nvkm_memory_target target, target 145 drivers/gpu/drm/nouveau/nvkm/core/memory.c if (unlikely(target != NVKM_MEM_TARGET_INST || !imem)) target 92 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c args->v0.version, args->v0.target, args->v0.access, target 94 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c dmaobj->target = args->v0.target; target 107 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c switch (dmaobj->target) { target 109 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c dmaobj->target = NV_MEM_TARGET_VM; target 118 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c dmaobj->target = 
NV_MEM_TARGET_VRAM; target 123 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c dmaobj->target = NV_MEM_TARGET_PCI; target 129 drivers/gpu/drm/nouveau/nvkm/engine/dma/user.c dmaobj->target = NV_MEM_TARGET_PCI_NOSNOOP; target 103 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf100.c if (dmaobj->base.target != NV_MEM_TARGET_VM) { target 120 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf100.c switch (dmaobj->base.target) { target 100 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf119.c if (dmaobj->base.target != NV_MEM_TARGET_VM) { target 114 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergf119.c switch (dmaobj->base.target) { target 110 drivers/gpu/drm/nouveau/nvkm/engine/dma/usergv100.c switch (dmaobj->base.target) { target 97 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c if (dmaobj->base.target == NV_MEM_TARGET_VM) { target 100 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c dmaobj->base.target = NV_MEM_TARGET_PCI; target 105 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv04.c switch (dmaobj->base.target) { target 105 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv50.c if (dmaobj->base.target != NV_MEM_TARGET_VM) { target 125 drivers/gpu/drm/nouveau/nvkm/engine/dma/usernv50.c switch (dmaobj->base.target) { target 58 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c int target; target 73 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c case NVKM_MEM_TARGET_VRAM: target = 0; break; target 74 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c case NVKM_MEM_TARGET_NCOH: target = 3; break; target 82 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gf100.c (target << 28)); target 157 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c int target; target 160 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c case NVKM_MEM_TARGET_VRAM: target = 0; break; target 161 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c case NVKM_MEM_TARGET_NCOH: target = 3; break; target 168 drivers/gpu/drm/nouveau/nvkm/engine/fifo/gk104.c (target << 28)); target 185 drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c calc_P(u32 src, u32 target, int *div) target 189 drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c if (clk0 <= target) { target 196 drivers/gpu/drm/nouveau/nvkm/subdev/clk/mcp77.c if (target - clk0 <= clk1 - target) target 344 drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c calc_div(u32 src, u32 target, int *div) target 348 drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c if (clk0 <= target) { target 355 drivers/gpu/drm/nouveau/nvkm/subdev/clk/nv50.c if (target - clk0 <= clk1 - target) target 96 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ram.c .target = nvkm_vram_target, target 1127 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c ret = gk104_ram_calc_data(ram, freq, &ram->base.target); target 1131 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c if (ram->base.target.freq < ram->base.former.freq) { target 1132 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c *xits = ram->base.target; target 1136 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c copy = &ram->base.target; target 1143 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c ram->base.next = &ram->base.target; target 1148 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgk104.c ram->base.next = &ram->base.target; target 350 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c struct nvbios_ramcfg *cfg = &ram->base.target.bios; target 511 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramgt215.c next = &ram->base.target; target 75 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c struct nvbios_ramcfg *cfg = &ram->base.target.bios; target 154 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c struct 
nvbios_ramcfg *cfg = &ram->base.target.bios; target 235 drivers/gpu/drm/nouveau/nvkm/subdev/fb/ramnv50.c next = &ram->base.target; target 355 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c .target = gk20a_instobj_target, target 367 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/gk20a.c .target = gk20a_instobj_target, target 112 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv04.c .target = nv04_instobj_target, target 112 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv40.c .target = nv40_instobj_target, target 340 drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c .target = nv50_instobj_target, target 32 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c enum nvkm_memory_target target; target 45 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c return nvkm_mem(memory)->target; target 102 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c .target = nvkm_mem_target, target 125 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c .target = nvkm_mem_target, target 153 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c enum nvkm_memory_target target; target 159 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c target = NVKM_MEM_TARGET_HOST; target 161 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c target = NVKM_MEM_TARGET_NCOH; target 168 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/mem.c mem->target = target; target 241 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c const enum nvkm_memory_target target = nvkm_memory_target(map->memory); target 264 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c vol = target == NVKM_MEM_TARGET_HOST; target 273 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c aper = vmm->func->aper(target); target 323 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c gf100_vmm_aper(enum nvkm_memory_target target) target 325 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgf100.c switch (target) { target 27 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk20a.c gk20a_vmm_aper(enum nvkm_memory_target target) target 29 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgk20a.c switch (target) { target 315 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c const enum nvkm_memory_target target = nvkm_memory_target(map->memory); target 337 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c vol = target == NVKM_MEM_TARGET_HOST; target 346 drivers/gpu/drm/nouveau/nvkm/subdev/mmu/vmmgp100.c aper = vmm->func->aper(target); target 32 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c nvkm_fan_update(struct nvkm_fan *fan, bool immediate, int target) target 43 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c if (target < 0) target 44 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c target = fan->percent; target 45 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c target = max_t(u8, target, fan->bios.min_duty); target 46 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c target = min_t(u8, target, fan->bios.max_duty); target 47 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c if (fan->percent != target) { target 48 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c nvkm_debug(subdev, "FAN target: %d\n", target); target 49 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c fan->percent = target; target 54 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c if (duty == target) { target 65 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c if (duty < target) target 66 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c duty = min(duty + 3, target); target 67 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c else if (duty > target) target 68 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c duty = max(duty - 3, target); target 70 
drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c duty = target; target 86 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c if (target != duty) { target 91 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c if (duty > target) target 93 drivers/gpu/drm/nouveau/nvkm/subdev/therm/fan.c else if (duty == target) target 131 drivers/gpu/drm/panfrost/panfrost_devfreq.c .target = panfrost_devfreq_target, target 697 drivers/gpu/drm/radeon/atom.c int execute = 0, target = U16(*ptr); target 726 drivers/gpu/drm/radeon/atom.c SDEBUG(" target: 0x%04X\n", target); target 728 drivers/gpu/drm/radeon/atom.c if (ctx->last_jump == (ctx->start + target)) { target 741 drivers/gpu/drm/radeon/atom.c ctx->last_jump = ctx->start + target; target 744 drivers/gpu/drm/radeon/atom.c *ptr = ctx->start + target; target 975 drivers/gpu/drm/radeon/atom.c uint32_t src, val, target; target 985 drivers/gpu/drm/radeon/atom.c target = U16(*ptr); target 987 drivers/gpu/drm/radeon/atom.c SDEBUG(" target: %04X\n", target); target 988 drivers/gpu/drm/radeon/atom.c *ptr = ctx->start + target; target 739 drivers/gpu/drm/radeon/radeon_acpi.c struct radeon_encoder *target = NULL; target 751 drivers/gpu/drm/radeon/radeon_acpi.c target = enc; target 757 drivers/gpu/drm/radeon/radeon_acpi.c target = enc; target 764 drivers/gpu/drm/radeon/radeon_acpi.c atif->encoder_for_bl = target; target 484 drivers/gpu/drm/radeon/radeon_display.c uint32_t target, target 578 drivers/gpu/drm/radeon/radeon_display.c work->target_vblank = target - (uint32_t)drm_crtc_vblank_count(crtc) + target 211 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_sclk_stepping *target) target 213 drivers/gpu/drm/radeon/rv6xx_dpm.c return (cur->post_divider > target->post_divider) && target 214 drivers/gpu/drm/radeon/rv6xx_dpm.c ((cur->vco_frequency * target->post_divider) <= target 215 drivers/gpu/drm/radeon/rv6xx_dpm.c (target->vco_frequency * (cur->post_divider - 1))); target 220 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_sclk_stepping *target) target 224 drivers/gpu/drm/radeon/rv6xx_dpm.c while (rv6xx_can_step_post_div(rdev, &next, target)) target 232 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_sclk_stepping *target, target 235 drivers/gpu/drm/radeon/rv6xx_dpm.c return (increasing_vco && (cur->vco_frequency >= target->vco_frequency)) || target 236 drivers/gpu/drm/radeon/rv6xx_dpm.c (!increasing_vco && (cur->vco_frequency <= target->vco_frequency)); target 244 drivers/gpu/drm/radeon/rv6xx_dpm.c struct rv6xx_sclk_stepping target; target 249 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_convert_clock_to_stepping(rdev, high, &target); target 253 drivers/gpu/drm/radeon/rv6xx_dpm.c increasing_vco = (target.vco_frequency >= cur.vco_frequency); target 255 drivers/gpu/drm/radeon/rv6xx_dpm.c if (target.post_divider > cur.post_divider) target 256 drivers/gpu/drm/radeon/rv6xx_dpm.c cur.post_divider = target.post_divider; target 261 drivers/gpu/drm/radeon/rv6xx_dpm.c if (rv6xx_can_step_post_div(rdev, &cur, &target)) target 262 drivers/gpu/drm/radeon/rv6xx_dpm.c next = rv6xx_next_post_div_step(rdev, &cur, &target); target 266 drivers/gpu/drm/radeon/rv6xx_dpm.c if (rv6xx_reached_stepping_target(rdev, &next, &target, increasing_vco)) { target 268 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_next_vco_step(rdev, &target, !increasing_vco, R600_ENDINGVCOSTEPPCT_DFLT); target 274 drivers/gpu/drm/radeon/rv6xx_dpm.c if ((next.post_divider != target.post_divider) && target 275 drivers/gpu/drm/radeon/rv6xx_dpm.c (next.vco_frequency != target.vco_frequency)) { target 278 drivers/gpu/drm/radeon/rv6xx_dpm.c 
final_vco.vco_frequency = target.vco_frequency; target 284 drivers/gpu/drm/radeon/rv6xx_dpm.c rv6xx_output_stepping(rdev, step_index++, &target); target 85 drivers/gpu/drm/rcar-du/rcar_du_crtc.c unsigned long target) target 144 drivers/gpu/drm/rcar-du/rcar_du_crtc.c diff = abs((long)output - (long)target); target 172 drivers/gpu/drm/rcar-du/rcar_du_crtc.c static void rcar_du_escr_divider(struct clk *clk, unsigned long target, target 190 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rate = clk_round_rate(clk, target); target 191 drivers/gpu/drm/rcar-du/rcar_du_crtc.c div = clamp(DIV_ROUND_CLOSEST(rate, target), 1UL, 64UL) - 1; target 192 drivers/gpu/drm/rcar-du/rcar_du_crtc.c diff = abs(rate / (div + 1) - target); target 220 drivers/gpu/drm/rcar-du/rcar_du_crtc.c unsigned long target = mode_clock; target 240 drivers/gpu/drm/rcar-du/rcar_du_crtc.c target *= 2; target 245 drivers/gpu/drm/rcar-du/rcar_du_crtc.c rcar_du_dpll_divider(rcrtc, &dpll, extclk, target); target 191 drivers/gpu/drm/rcar-du/rcar_lvds.c unsigned long target, struct pll_info *pll, target 291 drivers/gpu/drm/rcar-du/rcar_lvds.c div = max(1UL, DIV_ROUND_CLOSEST(fout, target)); target 292 drivers/gpu/drm/rcar-du/rcar_lvds.c diff = abs(fout / div - target); target 312 drivers/gpu/drm/rcar-du/rcar_lvds.c error = (long)(output - target) * 10000 / (long)target; target 316 drivers/gpu/drm/rcar-du/rcar_lvds.c clk, fin, output, target, error / 100, target 351 drivers/gpu/drm/sti/sti_dvo.c int target = mode->clock * 1000; target 352 drivers/gpu/drm/sti/sti_dvo.c int target_min = target - CLK_TOLERANCE_HZ; target 353 drivers/gpu/drm/sti/sti_dvo.c int target_max = target + CLK_TOLERANCE_HZ; target 359 drivers/gpu/drm/sti/sti_dvo.c result = clk_round_rate(dvo->clk_pix, target); target 362 drivers/gpu/drm/sti/sti_dvo.c target, result); target 365 drivers/gpu/drm/sti/sti_dvo.c DRM_DEBUG_DRIVER("dvo pixclk=%d not supported\n", target); target 606 drivers/gpu/drm/sti/sti_hda.c int target = mode->clock * 1000; target 607 drivers/gpu/drm/sti/sti_hda.c int target_min = target - CLK_TOLERANCE_HZ; target 608 drivers/gpu/drm/sti/sti_hda.c int target_max = target + CLK_TOLERANCE_HZ; target 618 drivers/gpu/drm/sti/sti_hda.c result = clk_round_rate(hda->clk_pix, target); target 621 drivers/gpu/drm/sti/sti_hda.c target, result); target 625 drivers/gpu/drm/sti/sti_hda.c target); target 1002 drivers/gpu/drm/sti/sti_hdmi.c int target = mode->clock * 1000; target 1003 drivers/gpu/drm/sti/sti_hdmi.c int target_min = target - CLK_TOLERANCE_HZ; target 1004 drivers/gpu/drm/sti/sti_hdmi.c int target_max = target + CLK_TOLERANCE_HZ; target 1011 drivers/gpu/drm/sti/sti_hdmi.c result = clk_round_rate(hdmi->clk_pix, target); target 1014 drivers/gpu/drm/sti/sti_hdmi.c target, result); target 1017 drivers/gpu/drm/sti/sti_hdmi.c DRM_DEBUG_DRIVER("hdmi pixclk=%d not supported\n", target); target 474 drivers/gpu/drm/stm/ltdc.c int target = mode->clock * 1000; target 475 drivers/gpu/drm/stm/ltdc.c int target_min = target - CLK_TOLERANCE_HZ; target 476 drivers/gpu/drm/stm/ltdc.c int target_max = target + CLK_TOLERANCE_HZ; target 479 drivers/gpu/drm/stm/ltdc.c result = clk_round_rate(ldev->pixel_clk, target); target 481 drivers/gpu/drm/stm/ltdc.c DRM_DEBUG_DRIVER("clk rate target %d, available %d\n", target, result); target 291 drivers/gpu/drm/tegra/drm.c u32 cmdbuf, target; target 302 drivers/gpu/drm/tegra/drm.c err = get_user(target, &src->target.handle); target 306 drivers/gpu/drm/tegra/drm.c err = get_user(dest->target.offset, &src->target.offset); target 318 
drivers/gpu/drm/tegra/drm.c dest->target.bo = host1x_bo_lookup(file, target); target 319 drivers/gpu/drm/tegra/drm.c if (!dest->target.bo) target 451 drivers/gpu/drm/tegra/drm.c obj = host1x_to_tegra_bo(reloc->target.bo); target 454 drivers/gpu/drm/tegra/drm.c if (reloc->target.offset >= obj->gem.size) { target 44 drivers/gpu/drm/tegra/falcon.c enum falcon_memory target) target 48 drivers/gpu/drm/tegra/falcon.c if (target == FALCON_MEMORY_IMEM) target 242 drivers/gpu/drm/ttm/ttm_memory.c uint64_t target; target 248 drivers/gpu/drm/ttm/ttm_memory.c target = zone->swap_limit; target 250 drivers/gpu/drm/ttm/ttm_memory.c target = zone->emer_mem; target 252 drivers/gpu/drm/ttm/ttm_memory.c target = zone->max_mem; target 254 drivers/gpu/drm/ttm/ttm_memory.c target = (extra > target) ? 0ULL : target; target 256 drivers/gpu/drm/ttm/ttm_memory.c if (zone->used_mem > target) target 61 drivers/gpu/drm/virtio/virtgpu_drv.h uint32_t target; target 292 drivers/gpu/drm/virtio/virtgpu_ioctl.c if (rc->target != 2) target 304 drivers/gpu/drm/virtio/virtgpu_ioctl.c params.target = rc->target; target 878 drivers/gpu/drm/virtio/virtgpu_vq.c cmd_p->target = cpu_to_le32(params->target); target 95 drivers/gpu/drm/vkms/vkms_drv.h #define drm_crtc_to_vkms_output(target) \ target 96 drivers/gpu/drm/vkms/vkms_drv.h container_of(target, struct vkms_output, crtc) target 98 drivers/gpu/drm/vkms/vkms_drv.h #define drm_device_to_vkms_device(target) \ target 99 drivers/gpu/drm/vkms/vkms_drv.h container_of(target, struct vkms_device, drm) target 101 drivers/gpu/drm/vkms/vkms_drv.h #define drm_gem_to_vkms_gem(target)\ target 102 drivers/gpu/drm/vkms/vkms_drv.h container_of(target, struct vkms_gem_object, gem) target 104 drivers/gpu/drm/vkms/vkms_drv.h #define to_vkms_crtc_state(target)\ target 105 drivers/gpu/drm/vkms/vkms_drv.h container_of(target, struct vkms_crtc_state, base) target 107 drivers/gpu/drm/vkms/vkms_drv.h #define to_vkms_plane_state(target)\ target 108 drivers/gpu/drm/vkms/vkms_drv.h container_of(target, struct vkms_plane_state, base) target 478 drivers/gpu/drm/vmwgfx/device_include/svga3d_cmd.h SVGA3dSurfaceImageId target; target 542 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c cmd->body.target.sid = ((rebind) ? 
bi->res->id : SVGA3D_INVALID_ID); target 543 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c cmd->body.target.face = 0; target 544 drivers/gpu/drm/vmwgfx/vmwgfx_binding.c cmd->body.target.mipmap = 0; target 893 drivers/gpu/drm/vmwgfx/vmwgfx_execbuf.c &cmd->body.target.sid, &res); target 112 drivers/gpu/host1x/job.c reloc->target.bo = host1x_bo_get(reloc->target.bo); target 113 drivers/gpu/host1x/job.c if (!reloc->target.bo) { target 118 drivers/gpu/host1x/job.c phys_addr = host1x_bo_pin(reloc->target.bo, &sgt); target 121 drivers/gpu/host1x/job.c job->unpins[job->num_unpins].bo = reloc->target.bo; target 198 drivers/gpu/host1x/job.c reloc->target.offset) >> reloc->shift; target 199 drivers/gpu/host1x/job.c u32 *target; target 206 drivers/gpu/host1x/job.c target = (u32 *)job->gather_copy_mapped + target 227 drivers/gpu/host1x/job.c target = cmdbuf_page_addr + (reloc->cmdbuf.offset & ~PAGE_MASK); target 229 drivers/gpu/host1x/job.c *target = reloc_addr; target 973 drivers/gpu/vga/vgaarb.c struct pci_dev *target; target 1024 drivers/gpu/vga/vgaarb.c pdev = priv->target; target 1037 drivers/gpu/vga/vgaarb.c if (pdev == priv->target) target 1107 drivers/gpu/vga/vgaarb.c pdev = priv->target; target 1108 drivers/gpu/vga/vgaarb.c if (priv->target == NULL) { target 1150 drivers/gpu/vga/vgaarb.c pdev = priv->target; target 1151 drivers/gpu/vga/vgaarb.c if (priv->target == NULL) { target 1201 drivers/gpu/vga/vgaarb.c pdev = priv->target; target 1202 drivers/gpu/vga/vgaarb.c if (priv->target == NULL) { target 1267 drivers/gpu/vga/vgaarb.c priv->target = pdev; target 1301 drivers/gpu/vga/vgaarb.c pdev = priv->target; target 1302 drivers/gpu/vga/vgaarb.c if (priv->target == NULL) { target 1344 drivers/gpu/vga/vgaarb.c priv->target = vga_default_device(); /* Maybe this is still null! 
*/ target 1345 drivers/gpu/vga/vgaarb.c priv->cards[0].pdev = priv->target; target 649 drivers/hid/hid-multitouch.c __s32 **target; target 661 drivers/hid/hid-multitouch.c target = (__s32 **)((char *)usage + offset); target 664 drivers/hid/hid-multitouch.c if (*target != DEFAULT_TRUE && target 665 drivers/hid/hid-multitouch.c *target != DEFAULT_FALSE && target 666 drivers/hid/hid-multitouch.c *target != DEFAULT_ZERO) { target 678 drivers/hid/hid-multitouch.c target = (__s32 **)((char *)usage + offset); target 681 drivers/hid/hid-multitouch.c *target = value; target 650 drivers/hwmon/coretemp.c int indx, target; target 678 drivers/hwmon/coretemp.c target = cpumask_any_and(&pd->cpumask, topology_sibling_cpumask(cpu)); target 679 drivers/hwmon/coretemp.c if (target >= nr_cpu_ids) { target 683 drivers/hwmon/coretemp.c tdata->cpu = target; target 704 drivers/hwmon/coretemp.c target = cpumask_first(&pd->cpumask); target 706 drivers/hwmon/coretemp.c tdata->cpu = target; target 1833 drivers/hwmon/f71882fg.c int target, full_speed; target 1836 drivers/hwmon/f71882fg.c target = fan_to_reg(val * fan_from_reg(full_speed) / 255); target 1837 drivers/hwmon/f71882fg.c f71882fg_write16(data, F71882FG_REG_FAN_TARGET(nr), target); target 1838 drivers/hwmon/f71882fg.c data->fan_target[nr] = target; target 2970 drivers/hwmon/nct6775.c int target = data->target_speed[nr]; target 2973 drivers/hwmon/nct6775.c if (target) { target 2974 drivers/hwmon/nct6775.c int low = target - data->target_speed_tolerance[nr]; target 2975 drivers/hwmon/nct6775.c int high = target + data->target_speed_tolerance[nr]; target 466 drivers/i2c/busses/i2c-mpc.c static int mpc_write(struct mpc_i2c *i2c, int target, target 476 drivers/i2c/busses/i2c-mpc.c writeb((target << 1), i2c->base + MPC_I2C_DR); target 494 drivers/i2c/busses/i2c-mpc.c static int mpc_read(struct mpc_i2c *i2c, int target, target 504 drivers/i2c/busses/i2c-mpc.c writeb((target << 1) | 1, i2c->base + MPC_I2C_DR); target 306 drivers/i2c/busses/i2c-octeon-core.c static int octeon_i2c_read(struct octeon_i2c *i2c, int target, target 312 drivers/i2c/busses/i2c-octeon-core.c octeon_i2c_data_write(i2c, (target << 1) | 1); target 374 drivers/i2c/busses/i2c-octeon-core.c static int octeon_i2c_write(struct octeon_i2c *i2c, int target, target 379 drivers/i2c/busses/i2c-octeon-core.c octeon_i2c_data_write(i2c, target << 1); target 175 drivers/iio/light/lm3533-als.c u8 target; target 182 drivers/iio/light/lm3533-als.c ret = lm3533_als_get_target(indio_dev, channel, zone, &target); target 186 drivers/iio/light/lm3533-als.c *val = target; target 1365 drivers/iio/light/tsl2772.c static int tsl2772_device_id_verif(int id, int target) target 1367 drivers/iio/light/tsl2772.c switch (target) { target 11076 drivers/infiniband/hw/hfi1/chip.c static int set_vl_weights(struct hfi1_pportdata *ppd, u32 target, target 11118 drivers/infiniband/hw/hfi1/chip.c write_csr(dd, target + (i * 8), reg); target 768 drivers/infiniband/hw/hfi1/debugfs.c size_t count, loff_t *ppos, u32 target) target 791 drivers/infiniband/hw/hfi1/debugfs.c total_written = i2c_write(ppd, target, i2c_addr, offset, buff, count); target 822 drivers/infiniband/hw/hfi1/debugfs.c size_t count, loff_t *ppos, u32 target) target 845 drivers/infiniband/hw/hfi1/debugfs.c total_read = i2c_read(ppd, target, i2c_addr, offset, buff, count); target 882 drivers/infiniband/hw/hfi1/debugfs.c size_t count, loff_t *ppos, u32 target) target 898 drivers/infiniband/hw/hfi1/debugfs.c total_written = qsfp_write(ppd, target, *ppos, buff, count); 
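The i2c-mpc.c and i2c-octeon-core.c hits above both build the bus address byte the same way: the 7-bit target address is shifted left by one and the low bit selects the transfer direction. A minimal user-space sketch of that convention follows (hypothetical helper names, not the drivers' own API):

#include <stdint.h>
#include <stdio.h>

/* Address byte layout assumed from the hits above: bits 7..1 hold the
 * 7-bit target address, bit 0 is the R/W flag (1 = read). */
static uint8_t i2c_addr_write(uint8_t target)
{
	return (uint8_t)(target << 1);          /* R/W bit clear: write */
}

static uint8_t i2c_addr_read(uint8_t target)
{
	return (uint8_t)((target << 1) | 1);    /* R/W bit set: read */
}

int main(void)
{
	uint8_t target = 0x50;  /* example 7-bit address, e.g. an EEPROM */

	printf("write byte 0x%02x, read byte 0x%02x\n",
	       i2c_addr_write(target), i2c_addr_read(target));
	return 0;
}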
target 929 drivers/infiniband/hw/hfi1/debugfs.c size_t count, loff_t *ppos, u32 target) target 949 drivers/infiniband/hw/hfi1/debugfs.c total_read = qsfp_read(ppd, target, *ppos, buff, count); target 985 drivers/infiniband/hw/hfi1/debugfs.c static int __i2c_debugfs_open(struct inode *in, struct file *fp, u32 target) target 992 drivers/infiniband/hw/hfi1/debugfs.c ret = acquire_chip_resource(ppd->dd, i2c_target(target), 0); target 1009 drivers/infiniband/hw/hfi1/debugfs.c static int __i2c_debugfs_release(struct inode *in, struct file *fp, u32 target) target 1015 drivers/infiniband/hw/hfi1/debugfs.c release_chip_resource(ppd->dd, i2c_target(target)); target 1031 drivers/infiniband/hw/hfi1/debugfs.c static int __qsfp_debugfs_open(struct inode *in, struct file *fp, u32 target) target 1041 drivers/infiniband/hw/hfi1/debugfs.c ret = acquire_chip_resource(ppd->dd, i2c_target(target), 0); target 1058 drivers/infiniband/hw/hfi1/debugfs.c static int __qsfp_debugfs_release(struct inode *in, struct file *fp, u32 target) target 1064 drivers/infiniband/hw/hfi1/debugfs.c release_chip_resource(ppd->dd, i2c_target(target)); target 2289 drivers/infiniband/hw/hfi1/hfi.h static inline u32 i2c_target(u32 target) target 2291 drivers/infiniband/hw/hfi1/hfi.h return target ? CR_I2C2 : CR_I2C1; target 316 drivers/infiniband/hw/hfi1/qsfp.c static int __i2c_write(struct hfi1_pportdata *ppd, u32 target, int i2c_addr, target 324 drivers/infiniband/hw/hfi1/qsfp.c bus = target ? dd->asic_data->i2c_bus1 : dd->asic_data->i2c_bus0; target 335 drivers/infiniband/hw/hfi1/qsfp.c int i2c_write(struct hfi1_pportdata *ppd, u32 target, int i2c_addr, int offset, target 340 drivers/infiniband/hw/hfi1/qsfp.c if (!check_chip_resource(ppd->dd, i2c_target(target), __func__)) target 343 drivers/infiniband/hw/hfi1/qsfp.c ret = __i2c_write(ppd, target, i2c_addr, offset, bp, len); target 355 drivers/infiniband/hw/hfi1/qsfp.c static int __i2c_read(struct hfi1_pportdata *ppd, u32 target, int i2c_addr, target 363 drivers/infiniband/hw/hfi1/qsfp.c bus = target ? 
dd->asic_data->i2c_bus1 : dd->asic_data->i2c_bus0; target 374 drivers/infiniband/hw/hfi1/qsfp.c int i2c_read(struct hfi1_pportdata *ppd, u32 target, int i2c_addr, int offset, target 379 drivers/infiniband/hw/hfi1/qsfp.c if (!check_chip_resource(ppd->dd, i2c_target(target), __func__)) target 382 drivers/infiniband/hw/hfi1/qsfp.c ret = __i2c_read(ppd, target, i2c_addr, offset, bp, len); target 397 drivers/infiniband/hw/hfi1/qsfp.c int qsfp_write(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 406 drivers/infiniband/hw/hfi1/qsfp.c if (!check_chip_resource(ppd->dd, i2c_target(target), __func__)) target 416 drivers/infiniband/hw/hfi1/qsfp.c ret = __i2c_write(ppd, target, QSFP_DEV | QSFP_OFFSET_SIZE, target 423 drivers/infiniband/hw/hfi1/qsfp.c target, ret); target 433 drivers/infiniband/hw/hfi1/qsfp.c ret = __i2c_write(ppd, target, QSFP_DEV | QSFP_OFFSET_SIZE, target 453 drivers/infiniband/hw/hfi1/qsfp.c int one_qsfp_write(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 463 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_write(ppd, target, addr, bp, len); target 477 drivers/infiniband/hw/hfi1/qsfp.c int qsfp_read(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 486 drivers/infiniband/hw/hfi1/qsfp.c if (!check_chip_resource(ppd->dd, i2c_target(target), __func__)) target 495 drivers/infiniband/hw/hfi1/qsfp.c ret = __i2c_write(ppd, target, QSFP_DEV | QSFP_OFFSET_SIZE, target 502 drivers/infiniband/hw/hfi1/qsfp.c target, ret); target 512 drivers/infiniband/hw/hfi1/qsfp.c ret = __i2c_read(ppd, target, QSFP_DEV | QSFP_OFFSET_SIZE, target 530 drivers/infiniband/hw/hfi1/qsfp.c int one_qsfp_read(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 540 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, addr, bp, len); target 560 drivers/infiniband/hw/hfi1/qsfp.c u32 target = ppd->dd->hfi1_id; target 576 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 0, cache, QSFP_PAGESIZE); target 589 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 384, cache + 256, 128); target 594 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 640, cache + 384, 128); target 599 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 896, cache + 512, 128); target 606 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 640, cache + 384, 128); target 611 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 896, cache + 512, 128); target 618 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 384, cache + 256, 128); target 623 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 896, cache + 512, 128); target 630 drivers/infiniband/hw/hfi1/qsfp.c ret = qsfp_read(ppd, target, 896, cache + 512, 128); target 232 drivers/infiniband/hw/hfi1/qsfp.h int i2c_write(struct hfi1_pportdata *ppd, u32 target, int i2c_addr, target 234 drivers/infiniband/hw/hfi1/qsfp.h int i2c_read(struct hfi1_pportdata *ppd, u32 target, int i2c_addr, target 236 drivers/infiniband/hw/hfi1/qsfp.h int qsfp_write(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 238 drivers/infiniband/hw/hfi1/qsfp.h int qsfp_read(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 240 drivers/infiniband/hw/hfi1/qsfp.h int one_qsfp_write(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 242 drivers/infiniband/hw/hfi1/qsfp.h int one_qsfp_read(struct hfi1_pportdata *ppd, u32 target, int addr, void *bp, target 962 drivers/infiniband/hw/hfi1/user_sdma.c evict_data.target = 
npages; target 1521 drivers/infiniband/hw/hfi1/user_sdma.c if (evict_data->cleared >= evict_data->target) target 170 drivers/infiniband/hw/hfi1/user_sdma.h u32 target; /* target count to evict */ target 221 drivers/infiniband/ulp/srp/ib_srp.c static int srp_target_is_topspin(struct srp_target_port *target) target 227 drivers/infiniband/ulp/srp/ib_srp.c (!memcmp(&target->ioc_guid, topspin_oui, sizeof topspin_oui) || target 228 drivers/infiniband/ulp/srp/ib_srp.c !memcmp(&target->ioc_guid, cisco_oui, sizeof cisco_oui)); target 280 drivers/infiniband/ulp/srp/ib_srp.c static int srp_init_ib_qp(struct srp_target_port *target, target 290 drivers/infiniband/ulp/srp/ib_srp.c ret = ib_find_cached_pkey(target->srp_host->srp_dev->dev, target 291 drivers/infiniband/ulp/srp/ib_srp.c target->srp_host->port, target 292 drivers/infiniband/ulp/srp/ib_srp.c be16_to_cpu(target->ib_cm.pkey), target 300 drivers/infiniband/ulp/srp/ib_srp.c attr->port_num = target->srp_host->port; target 315 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 318 drivers/infiniband/ulp/srp/ib_srp.c new_cm_id = ib_create_cm_id(target->srp_host->srp_dev->dev, target 326 drivers/infiniband/ulp/srp/ib_srp.c if (rdma_cap_opa_ah(target->srp_host->srp_dev->dev, target 327 drivers/infiniband/ulp/srp/ib_srp.c target->srp_host->port)) target 331 drivers/infiniband/ulp/srp/ib_srp.c ch->ib_cm.path.sgid = target->sgid; target 332 drivers/infiniband/ulp/srp/ib_srp.c ch->ib_cm.path.dgid = target->ib_cm.orig_dgid; target 333 drivers/infiniband/ulp/srp/ib_srp.c ch->ib_cm.path.pkey = target->ib_cm.pkey; target 334 drivers/infiniband/ulp/srp/ib_srp.c ch->ib_cm.path.service_id = target->ib_cm.service_id; target 341 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 345 drivers/infiniband/ulp/srp/ib_srp.c new_cm_id = rdma_create_id(target->net, srp_rdma_cm_handler, ch, target 354 drivers/infiniband/ulp/srp/ib_srp.c ret = rdma_resolve_addr(new_cm_id, target->rdma_cm.src_specified ? target 355 drivers/infiniband/ulp/srp/ib_srp.c (struct sockaddr *)&target->rdma_cm.src : NULL, target 356 drivers/infiniband/ulp/srp/ib_srp.c (struct sockaddr *)&target->rdma_cm.dst, target 360 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.src, &target->rdma_cm.dst, ret); target 370 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.dst, ret); target 385 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 387 drivers/infiniband/ulp/srp/ib_srp.c return target->using_rdma_cm ? 
srp_new_rdma_cm_id(ch) : target 391 drivers/infiniband/ulp/srp/ib_srp.c static struct ib_fmr_pool *srp_alloc_fmr_pool(struct srp_target_port *target) target 393 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 397 drivers/infiniband/ulp/srp/ib_srp.c fmr_param.pool_size = target->mr_pool_size; target 525 drivers/infiniband/ulp/srp/ib_srp.c static struct srp_fr_pool *srp_alloc_fr_pool(struct srp_target_port *target) target 527 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 529 drivers/infiniband/ulp/srp/ib_srp.c return srp_create_fr_pool(dev->dev, dev->pd, target->mr_pool_size, target 553 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 554 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 560 drivers/infiniband/ulp/srp/ib_srp.c const int m = 1 + dev->use_fast_reg * target->mr_per_cmd * 2; target 568 drivers/infiniband/ulp/srp/ib_srp.c recv_cq = ib_alloc_cq(dev->dev, ch, target->queue_size + 1, target 575 drivers/infiniband/ulp/srp/ib_srp.c send_cq = ib_alloc_cq(dev->dev, ch, m * target->queue_size, target 583 drivers/infiniband/ulp/srp/ib_srp.c init_attr->cap.max_send_wr = m * target->queue_size; target 584 drivers/infiniband/ulp/srp/ib_srp.c init_attr->cap.max_recv_wr = target->queue_size + 1; target 592 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) { target 598 drivers/infiniband/ulp/srp/ib_srp.c ret = srp_init_ib_qp(target, qp); target 612 drivers/infiniband/ulp/srp/ib_srp.c fr_pool = srp_alloc_fr_pool(target); target 615 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, PFX target 620 drivers/infiniband/ulp/srp/ib_srp.c fmr_pool = srp_alloc_fmr_pool(target); target 623 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, PFX target 654 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) target 674 drivers/infiniband/ulp/srp/ib_srp.c static void srp_free_ch_ib(struct srp_target_port *target, target 677 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 680 drivers/infiniband/ulp/srp/ib_srp.c if (!ch->target) target 683 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) { target 717 drivers/infiniband/ulp/srp/ib_srp.c ch->target = NULL; target 723 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->queue_size; ++i) target 724 drivers/infiniband/ulp/srp/ib_srp.c srp_free_iu(target->srp_host, ch->rx_ring[i]); target 729 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->queue_size; ++i) target 730 drivers/infiniband/ulp/srp/ib_srp.c srp_free_iu(target->srp_host, ch->tx_ring[i]); target 741 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 745 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 754 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 762 drivers/infiniband/ulp/srp/ib_srp.c target->srp_host->srp_dev->dev, target 763 drivers/infiniband/ulp/srp/ib_srp.c target->srp_host->port, target 782 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 785 drivers/infiniband/ulp/srp/ib_srp.c be16_to_cpu(target->ib_cm.pkey), target 786 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ib_cm.service_id)); target 793 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 805 
drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 813 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 815 drivers/infiniband/ulp/srp/ib_srp.c return target->using_rdma_cm ? srp_rdma_lookup_path(ch) : target 839 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 854 drivers/infiniband/ulp/srp/ib_srp.c req->ib_param.retry_count = target->tl_retry_count; target 876 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) { target 898 drivers/infiniband/ulp/srp/ib_srp.c subnet_timeout = srp_get_subnet_timeout(target->srp_host); target 902 drivers/infiniband/ulp/srp/ib_srp.c req->ib_param.service_id = target->ib_cm.service_id; target 925 drivers/infiniband/ulp/srp/ib_srp.c if (target->io_class == SRP_REV10_IB_IO_CLASS) { target 926 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ipi, &target->sgid.global.interface_id, 8); target 927 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ipi + 8, &target->initiator_ext, 8); target 928 drivers/infiniband/ulp/srp/ib_srp.c memcpy(tpi, &target->ioc_guid, 8); target 929 drivers/infiniband/ulp/srp/ib_srp.c memcpy(tpi + 8, &target->id_ext, 8); target 931 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ipi, &target->initiator_ext, 8); target 932 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ipi + 8, &target->sgid.global.interface_id, 8); target 933 drivers/infiniband/ulp/srp/ib_srp.c memcpy(tpi, &target->id_ext, 8); target 934 drivers/infiniband/ulp/srp/ib_srp.c memcpy(tpi + 8, &target->ioc_guid, 8); target 942 drivers/infiniband/ulp/srp/ib_srp.c if (srp_target_is_topspin(target)) { target 943 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, target 946 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ioc_guid)); target 948 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ipi + 8, &target->srp_host->srp_dev->dev->node_guid, 8); target 951 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) target 961 drivers/infiniband/ulp/srp/ib_srp.c static bool srp_queue_remove_work(struct srp_target_port *target) target 965 drivers/infiniband/ulp/srp/ib_srp.c spin_lock_irq(&target->lock); target 966 drivers/infiniband/ulp/srp/ib_srp.c if (target->state != SRP_TARGET_REMOVED) { target 967 drivers/infiniband/ulp/srp/ib_srp.c target->state = SRP_TARGET_REMOVED; target 970 drivers/infiniband/ulp/srp/ib_srp.c spin_unlock_irq(&target->lock); target 973 drivers/infiniband/ulp/srp/ib_srp.c queue_work(srp_remove_wq, &target->remove_work); target 978 drivers/infiniband/ulp/srp/ib_srp.c static void srp_disconnect_target(struct srp_target_port *target) target 985 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 986 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 989 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) { target 998 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, target 1004 drivers/infiniband/ulp/srp/ib_srp.c static void srp_free_req_data(struct srp_target_port *target, target 1007 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 1015 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->req_ring_size; ++i) { target 1025 drivers/infiniband/ulp/srp/ib_srp.c target->indirect_size, target 1037 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1038 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *srp_dev = target->srp_host->srp_dev; target 1045 
drivers/infiniband/ulp/srp/ib_srp.c ch->req_ring = kcalloc(target->req_ring_size, sizeof(*ch->req_ring), target 1050 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->req_ring_size; ++i) { target 1052 drivers/infiniband/ulp/srp/ib_srp.c mr_list = kmalloc_array(target->mr_per_cmd, sizeof(void *), target 1066 drivers/infiniband/ulp/srp/ib_srp.c req->indirect_desc = kmalloc(target->indirect_size, GFP_KERNEL); target 1071 drivers/infiniband/ulp/srp/ib_srp.c target->indirect_size, target 1099 drivers/infiniband/ulp/srp/ib_srp.c static void srp_remove_target(struct srp_target_port *target) target 1104 drivers/infiniband/ulp/srp/ib_srp.c WARN_ON_ONCE(target->state != SRP_TARGET_REMOVED); target 1106 drivers/infiniband/ulp/srp/ib_srp.c srp_del_scsi_host_attr(target->scsi_host); target 1107 drivers/infiniband/ulp/srp/ib_srp.c srp_rport_get(target->rport); target 1108 drivers/infiniband/ulp/srp/ib_srp.c srp_remove_host(target->scsi_host); target 1109 drivers/infiniband/ulp/srp/ib_srp.c scsi_remove_host(target->scsi_host); target 1110 drivers/infiniband/ulp/srp/ib_srp.c srp_stop_rport_timers(target->rport); target 1111 drivers/infiniband/ulp/srp/ib_srp.c srp_disconnect_target(target); target 1112 drivers/infiniband/ulp/srp/ib_srp.c kobj_ns_drop(KOBJ_NS_TYPE_NET, target->net); target 1113 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1114 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1115 drivers/infiniband/ulp/srp/ib_srp.c srp_free_ch_ib(target, ch); target 1117 drivers/infiniband/ulp/srp/ib_srp.c cancel_work_sync(&target->tl_err_work); target 1118 drivers/infiniband/ulp/srp/ib_srp.c srp_rport_put(target->rport); target 1119 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1120 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1121 drivers/infiniband/ulp/srp/ib_srp.c srp_free_req_data(target, ch); target 1123 drivers/infiniband/ulp/srp/ib_srp.c kfree(target->ch); target 1124 drivers/infiniband/ulp/srp/ib_srp.c target->ch = NULL; target 1126 drivers/infiniband/ulp/srp/ib_srp.c spin_lock(&target->srp_host->target_lock); target 1127 drivers/infiniband/ulp/srp/ib_srp.c list_del(&target->list); target 1128 drivers/infiniband/ulp/srp/ib_srp.c spin_unlock(&target->srp_host->target_lock); target 1130 drivers/infiniband/ulp/srp/ib_srp.c scsi_host_put(target->scsi_host); target 1135 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = target 1138 drivers/infiniband/ulp/srp/ib_srp.c WARN_ON_ONCE(target->state != SRP_TARGET_REMOVED); target 1140 drivers/infiniband/ulp/srp/ib_srp.c srp_remove_target(target); target 1145 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = rport->lld_data; target 1147 drivers/infiniband/ulp/srp/ib_srp.c srp_queue_remove_work(target); target 1154 drivers/infiniband/ulp/srp/ib_srp.c static int srp_connected_ch(struct srp_target_port *target) target 1158 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) target 1159 drivers/infiniband/ulp/srp/ib_srp.c c += target->ch[i].connected; target 1167 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1170 drivers/infiniband/ulp/srp/ib_srp.c WARN_ON_ONCE(!multich && srp_connected_ch(target) > 0); target 1207 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX target 1246 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1247 drivers/infiniband/ulp/srp/ib_srp.c struct 
srp_device *dev = target->srp_host->srp_dev; target 1262 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX target 1266 drivers/infiniband/ulp/srp/ib_srp.c &target->tl_err_work); target 1348 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = rport->lld_data; target 1352 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1353 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1355 drivers/infiniband/ulp/srp/ib_srp.c for (j = 0; j < target->req_ring_size; ++j) { target 1389 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = rport->lld_data; target 1391 drivers/infiniband/ulp/srp/ib_srp.c uint32_t max_iu_len = srp_max_it_iu_len(target->cmd_sg_cnt, target 1396 drivers/infiniband/ulp/srp/ib_srp.c srp_disconnect_target(target); target 1398 drivers/infiniband/ulp/srp/ib_srp.c if (target->state == SRP_TARGET_SCANNING) target 1406 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1407 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1410 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1411 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1412 drivers/infiniband/ulp/srp/ib_srp.c for (j = 0; j < target->req_ring_size; ++j) { target 1418 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1419 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1428 drivers/infiniband/ulp/srp/ib_srp.c for (j = 0; j < target->queue_size; ++j) target 1432 drivers/infiniband/ulp/srp/ib_srp.c target->qp_in_error = false; target 1434 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 1435 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 1443 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_INFO, target->scsi_host, target 1468 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1469 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 1474 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, ch->target->scsi_host, target 1476 drivers/infiniband/ulp/srp/ib_srp.c ch->target->mr_per_cmd); target 1485 drivers/infiniband/ulp/srp/ib_srp.c if (state->npages == 1 && target->global_rkey) { target 1487 drivers/infiniband/ulp/srp/ib_srp.c target->global_rkey); target 1525 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1526 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 1533 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, ch->target->scsi_host, target 1535 drivers/infiniband/ulp/srp/ib_srp.c ch->target->mr_per_cmd); target 1541 drivers/infiniband/ulp/srp/ib_srp.c if (sg_nents == 1 && target->global_rkey) { target 1546 drivers/infiniband/ulp/srp/ib_srp.c target->global_rkey); target 1603 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1604 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 1652 drivers/infiniband/ulp/srp/ib_srp.c state->fmr.end = req->fmr_list + ch->target->mr_per_cmd; target 1674 drivers/infiniband/ulp/srp/ib_srp.c state->fr.end = req->fr_list + ch->target->mr_per_cmd; target 1699 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1705 drivers/infiniband/ulp/srp/ib_srp.c target->global_rkey); target 1722 
drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1723 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = target->srp_host->srp_dev; target 1770 drivers/infiniband/ulp/srp/ib_srp.c struct srp_device *dev = ch->target->srp_host->srp_dev; target 1803 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 1823 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 1833 drivers/infiniband/ulp/srp/ib_srp.c dev = target->srp_host->srp_dev; target 1856 drivers/infiniband/ulp/srp/ib_srp.c sge[i].lkey = target->lkey; target 1866 drivers/infiniband/ulp/srp/ib_srp.c if (count == 1 && target->global_rkey) { target 1877 drivers/infiniband/ulp/srp/ib_srp.c buf->key = cpu_to_be32(target->global_rkey); target 1891 drivers/infiniband/ulp/srp/ib_srp.c target->indirect_size, DMA_TO_DEVICE); target 1930 drivers/infiniband/ulp/srp/ib_srp.c if (unlikely(target->cmd_sg_cnt < state.ndesc && target 1931 drivers/infiniband/ulp/srp/ib_srp.c !target->allow_ext_sg)) { target 1932 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 1938 drivers/infiniband/ulp/srp/ib_srp.c count = min(state.ndesc, target->cmd_sg_cnt); target 1950 drivers/infiniband/ulp/srp/ib_srp.c if (!target->global_rkey) { target 1957 drivers/infiniband/ulp/srp/ib_srp.c idb_rkey = cpu_to_be32(target->global_rkey); target 1983 drivers/infiniband/ulp/srp/ib_srp.c if (ret == -ENOMEM && req->nmdesc >= target->mr_pool_size) target 2019 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2033 drivers/infiniband/ulp/srp/ib_srp.c ++target->zero_req_lim; target 2073 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2081 drivers/infiniband/ulp/srp/ib_srp.c iu->sge[0].lkey = target->lkey; target 2097 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2103 drivers/infiniband/ulp/srp/ib_srp.c list.lkey = target->lkey; target 2117 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2131 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2137 drivers/infiniband/ulp/srp/ib_srp.c scmnd = scsi_host_find_tag(target->scsi_host, rsp->tag); target 2145 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2147 drivers/infiniband/ulp/srp/ib_srp.c rsp->tag, ch - target->ch, ch->qp->qp_num); target 2184 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2185 drivers/infiniband/ulp/srp/ib_srp.c struct ib_device *dev = target->srp_host->srp_dev->dev; target 2196 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX target 2208 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX target 2226 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, ch->target->scsi_host, PFX target 2233 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2240 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX target 2244 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX target 2252 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2253 drivers/infiniband/ulp/srp/ib_srp.c struct ib_device *dev = target->srp_host->srp_dev->dev; target 2268 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, 
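The ib_srp.c hits above around lines 1866-1877 show the fast path taken when a mapped request has exactly one scatter/gather entry and a global rkey is available: a single direct descriptor is filled in instead of registering a memory region. A hedged sketch of that decision, using made-up structure and function names rather than the driver's real ones:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for a direct data descriptor. */
struct fake_direct_desc {
	uint64_t va;
	uint32_t len;
	uint32_t key;
};

/* Returns true if the single-entry/global-rkey fast path applies;
 * otherwise the caller would fall back to memory registration. */
static bool try_direct_desc(int sg_count, uint32_t global_rkey,
			    uint64_t dma_addr, uint32_t dma_len,
			    struct fake_direct_desc *out)
{
	if (sg_count != 1 || !global_rkey)
		return false;

	out->va  = dma_addr;
	out->len = dma_len;
	out->key = global_rkey;
	return true;
}

int main(void)
{
	struct fake_direct_desc d;

	if (try_direct_desc(1, 0x1234, 0x100000, 4096, &d))
		printf("direct descriptor: key=0x%x len=%u\n", d.key, d.len);
	else
		printf("fall back to memory registration\n");
	return 0;
}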
target 2289 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2294 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2304 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2317 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target; target 2319 drivers/infiniband/ulp/srp/ib_srp.c target = container_of(work, struct srp_target_port, tl_err_work); target 2320 drivers/infiniband/ulp/srp/ib_srp.c if (target->rport) target 2321 drivers/infiniband/ulp/srp/ib_srp.c srp_start_tl_fail_timers(target->rport); target 2328 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2330 drivers/infiniband/ulp/srp/ib_srp.c if (ch->connected && !target->qp_in_error) { target 2331 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2335 drivers/infiniband/ulp/srp/ib_srp.c queue_work(system_long_wq, &target->tl_err_work); target 2337 drivers/infiniband/ulp/srp/ib_srp.c target->qp_in_error = true; target 2342 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(shost); target 2353 drivers/infiniband/ulp/srp/ib_srp.c scmnd->result = srp_chkready(target->rport); target 2359 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[blk_mq_unique_tag_to_hwq(tag)]; target 2361 drivers/infiniband/ulp/srp/ib_srp.c WARN_ONCE(idx >= target->req_ring_size, "%s: tag %#x: idx %d >= %d\n", target 2363 drivers/infiniband/ulp/srp/ib_srp.c target->req_ring_size); target 2373 drivers/infiniband/ulp/srp/ib_srp.c dev = target->srp_host->srp_dev->dev; target 2398 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2415 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX "Send failed\n"); target 2451 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2454 drivers/infiniband/ulp/srp/ib_srp.c ch->rx_ring = kcalloc(target->queue_size, sizeof(*ch->rx_ring), target 2458 drivers/infiniband/ulp/srp/ib_srp.c ch->tx_ring = kcalloc(target->queue_size, sizeof(*ch->tx_ring), target 2463 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->queue_size; ++i) { target 2464 drivers/infiniband/ulp/srp/ib_srp.c ch->rx_ring[i] = srp_alloc_iu(target->srp_host, target 2471 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->queue_size; ++i) { target 2472 drivers/infiniband/ulp/srp/ib_srp.c ch->tx_ring[i] = srp_alloc_iu(target->srp_host, target 2484 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->queue_size; ++i) { target 2485 drivers/infiniband/ulp/srp/ib_srp.c srp_free_iu(target->srp_host, ch->rx_ring[i]); target 2486 drivers/infiniband/ulp/srp/ib_srp.c srp_free_iu(target->srp_host, ch->tx_ring[i]); target 2530 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2541 drivers/infiniband/ulp/srp/ib_srp.c ch->max_it_iu_len = srp_max_it_iu_len(target->cmd_sg_cnt, target 2547 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, target 2554 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->can_queue target 2556 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->can_queue); target 2557 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->cmd_per_lun target 2558 drivers/infiniband/ulp/srp/ib_srp.c = min_t(int, target->scsi_host->can_queue, target 2559 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->cmd_per_lun); target 2561 
drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2573 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->queue_size; i++) { target 2581 drivers/infiniband/ulp/srp/ib_srp.c if (!target->using_rdma_cm) { target 2601 drivers/infiniband/ulp/srp/ib_srp.c target->rq_tmo_jiffies = srp_compute_rq_tmo(qp_attr, attr_mask); target 2621 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2622 drivers/infiniband/ulp/srp/ib_srp.c struct Scsi_Host *shost = target->scsi_host; target 2640 drivers/infiniband/ulp/srp/ib_srp.c if (srp_target_is_topspin(target)) { target 2681 drivers/infiniband/ulp/srp/ib_srp.c target->sgid.raw, target 2682 drivers/infiniband/ulp/srp/ib_srp.c target->ib_cm.orig_dgid.raw, target 2707 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2712 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, target 2724 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, PFX "REJ received\n"); target 2731 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2735 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2737 drivers/infiniband/ulp/srp/ib_srp.c queue_work(system_long_wq, &target->tl_err_work); target 2741 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2754 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2768 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2769 drivers/infiniband/ulp/srp/ib_srp.c struct Scsi_Host *shost = target->scsi_host; target 2819 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2845 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, target 2857 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, PFX "REJ received\n"); target 2865 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2870 drivers/infiniband/ulp/srp/ib_srp.c queue_work(system_long_wq, &target->tl_err_work); target 2875 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 2883 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_WARNING, target->scsi_host, target 2912 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = ch->target; target 2913 drivers/infiniband/ulp/srp/ib_srp.c struct srp_rport *rport = target->rport; target 2914 drivers/infiniband/ulp/srp/ib_srp.c struct ib_device *dev = target->srp_host->srp_dev->dev; target 2919 drivers/infiniband/ulp/srp/ib_srp.c if (!ch->connected || target->qp_in_error) target 2977 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(scmnd->device->host); target 2984 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, "SRP abort called\n"); target 2990 drivers/infiniband/ulp/srp/ib_srp.c if (WARN_ON_ONCE(ch_idx >= target->ch_count)) target 2992 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[ch_idx]; target 2995 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 3000 drivers/infiniband/ulp/srp/ib_srp.c else if (target->rport->state == SRP_RPORT_LOST) target 3015 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(scmnd->device->host); target 3019 drivers/infiniband/ulp/srp/ib_srp.c 
shost_printk(KERN_ERR, target->scsi_host, "SRP reset_device called\n"); target 3021 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[0]; target 3033 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(scmnd->device->host); target 3035 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, PFX "SRP reset_host called\n"); target 3037 drivers/infiniband/ulp/srp/ib_srp.c return srp_reconnect_rport(target->rport) == 0 ? SUCCESS : FAILED; target 3043 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(shost); target 3045 drivers/infiniband/ulp/srp/ib_srp.c if (target->target_can_queue) target 3046 drivers/infiniband/ulp/srp/ib_srp.c starget->can_queue = target->target_can_queue; target 3053 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(shost); target 3058 drivers/infiniband/ulp/srp/ib_srp.c timeout = max_t(unsigned, 30 * HZ, target->rq_tmo_jiffies); target 3068 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3070 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "0x%016llx\n", be64_to_cpu(target->id_ext)); target 3076 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3078 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "0x%016llx\n", be64_to_cpu(target->ioc_guid)); target 3084 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3086 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) target 3089 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ib_cm.service_id)); target 3095 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3097 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) target 3099 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "0x%04x\n", be16_to_cpu(target->ib_cm.pkey)); target 3105 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3107 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%pI6\n", target->sgid.raw); target 3113 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3114 drivers/infiniband/ulp/srp/ib_srp.c struct srp_rdma_ch *ch = &target->ch[0]; target 3116 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) target 3124 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3126 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) target 3128 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%pI6\n", target->ib_cm.orig_dgid.raw); target 3134 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3138 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 3139 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 3148 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3150 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%d\n", target->zero_req_lim); target 3156 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3158 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%d\n", target->srp_host->port); target 
3164 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3167 drivers/infiniband/ulp/srp/ib_srp.c dev_name(&target->srp_host->srp_dev->dev->dev)); target 3173 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3175 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%d\n", target->ch_count); target 3181 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3183 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%d\n", target->comp_vector); target 3189 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3191 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%d\n", target->tl_retry_count); target 3197 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3199 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%u\n", target->cmd_sg_cnt); target 3205 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target = host_to_target(class_to_shost(dev)); target 3207 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buf, "%s\n", target->allow_ext_sg ? "true" : "false"); target 3287 drivers/infiniband/ulp/srp/ib_srp.c static int srp_add_target(struct srp_host *host, struct srp_target_port *target) target 3292 drivers/infiniband/ulp/srp/ib_srp.c target->state = SRP_TARGET_SCANNING; target 3293 drivers/infiniband/ulp/srp/ib_srp.c sprintf(target->target_name, "SRP.T10:%016llX", target 3294 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->id_ext)); target 3296 drivers/infiniband/ulp/srp/ib_srp.c if (scsi_add_host(target->scsi_host, host->srp_dev->dev->dev.parent)) target 3299 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ids.port_id, &target->id_ext, 8); target 3300 drivers/infiniband/ulp/srp/ib_srp.c memcpy(ids.port_id + 8, &target->ioc_guid, 8); target 3302 drivers/infiniband/ulp/srp/ib_srp.c rport = srp_rport_add(target->scsi_host, &ids); target 3304 drivers/infiniband/ulp/srp/ib_srp.c scsi_remove_host(target->scsi_host); target 3308 drivers/infiniband/ulp/srp/ib_srp.c rport->lld_data = target; target 3309 drivers/infiniband/ulp/srp/ib_srp.c target->rport = rport; target 3312 drivers/infiniband/ulp/srp/ib_srp.c list_add_tail(&target->list, &host->target_list); target 3315 drivers/infiniband/ulp/srp/ib_srp.c scsi_scan_target(&target->scsi_host->shost_gendev, target 3316 drivers/infiniband/ulp/srp/ib_srp.c 0, target->scsi_id, SCAN_WILD_CARD, SCSI_SCAN_INITIAL); target 3318 drivers/infiniband/ulp/srp/ib_srp.c if (srp_connected_ch(target) < target->ch_count || target 3319 drivers/infiniband/ulp/srp/ib_srp.c target->qp_in_error) { target 3320 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_INFO, target->scsi_host, target 3322 drivers/infiniband/ulp/srp/ib_srp.c srp_queue_remove_work(target); target 3327 drivers/infiniband/ulp/srp/ib_srp.c dev_name(&target->scsi_host->shost_gendev), target 3328 drivers/infiniband/ulp/srp/ib_srp.c srp_sdev_count(target->scsi_host)); target 3330 drivers/infiniband/ulp/srp/ib_srp.c spin_lock_irq(&target->lock); target 3331 drivers/infiniband/ulp/srp/ib_srp.c if (target->state == SRP_TARGET_SCANNING) target 3332 drivers/infiniband/ulp/srp/ib_srp.c target->state = SRP_TARGET_LIVE; target 3333 drivers/infiniband/ulp/srp/ib_srp.c spin_unlock_irq(&target->lock); target 3358 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target) 
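The ib_srp.c hits above (around lines 961-973 and 3330-3333) both take the target lock before changing the port state and only make the change if the current state still permits it (not yet REMOVED in one case, still SCANNING in the other), so a concurrent removal is never undone by a late scan completion. A minimal user-space sketch of that guarded-transition pattern, with hypothetical names:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

enum fake_state { ST_SCANNING, ST_LIVE, ST_REMOVED };

struct fake_target {
	pthread_mutex_t lock;
	enum fake_state state;
};

/* Move to 'next' only if the state is still 'expected' under the lock. */
static bool transition(struct fake_target *t,
		       enum fake_state expected, enum fake_state next)
{
	bool changed = false;

	pthread_mutex_lock(&t->lock);
	if (t->state == expected) {
		t->state = next;
		changed = true;
	}
	pthread_mutex_unlock(&t->lock);
	return changed;
}

int main(void)
{
	struct fake_target t = { PTHREAD_MUTEX_INITIALIZER, ST_SCANNING };

	printf("scan finished, go live: %d\n",
	       transition(&t, ST_SCANNING, ST_LIVE));
	printf("stale second attempt:   %d\n",
	       transition(&t, ST_SCANNING, ST_LIVE));
	return 0;
}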
target 3363 drivers/infiniband/ulp/srp/ib_srp.c if (target->state == SRP_TARGET_REMOVED) target 3370 drivers/infiniband/ulp/srp/ib_srp.c if (t != target && target 3371 drivers/infiniband/ulp/srp/ib_srp.c target->id_ext == t->id_ext && target 3372 drivers/infiniband/ulp/srp/ib_srp.c target->ioc_guid == t->ioc_guid && target 3373 drivers/infiniband/ulp/srp/ib_srp.c target->initiator_ext == t->initiator_ext) { target 3492 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target) target 3529 drivers/infiniband/ulp/srp/ib_srp.c target->id_ext = cpu_to_be64(ull); target 3545 drivers/infiniband/ulp/srp/ib_srp.c target->ioc_guid = cpu_to_be64(ull); target 3561 drivers/infiniband/ulp/srp/ib_srp.c ret = hex2bin(target->ib_cm.orig_dgid.raw, p, 16); target 3572 drivers/infiniband/ulp/srp/ib_srp.c target->ib_cm.pkey = cpu_to_be16(token); target 3587 drivers/infiniband/ulp/srp/ib_srp.c target->ib_cm.service_id = cpu_to_be64(ull); target 3597 drivers/infiniband/ulp/srp/ib_srp.c ret = srp_parse_in(net, &target->rdma_cm.src.ss, p, target 3604 drivers/infiniband/ulp/srp/ib_srp.c target->rdma_cm.src_specified = true; target 3614 drivers/infiniband/ulp/srp/ib_srp.c ret = srp_parse_in(net, &target->rdma_cm.dst.ss, p, target 3623 drivers/infiniband/ulp/srp/ib_srp.c target->using_rdma_cm = true; target 3632 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->max_sectors = token; target 3640 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->can_queue = token; target 3641 drivers/infiniband/ulp/srp/ib_srp.c target->queue_size = token + SRP_RSP_SQ_SIZE + target 3644 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->cmd_per_lun = token; target 3653 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->cmd_per_lun = token; target 3662 drivers/infiniband/ulp/srp/ib_srp.c target->target_can_queue = token; target 3677 drivers/infiniband/ulp/srp/ib_srp.c target->io_class = token; target 3692 drivers/infiniband/ulp/srp/ib_srp.c target->initiator_ext = cpu_to_be64(ull); target 3702 drivers/infiniband/ulp/srp/ib_srp.c target->cmd_sg_cnt = token; target 3710 drivers/infiniband/ulp/srp/ib_srp.c target->allow_ext_sg = !!token; target 3720 drivers/infiniband/ulp/srp/ib_srp.c target->sg_tablesize = token; target 3728 drivers/infiniband/ulp/srp/ib_srp.c target->comp_vector = token; target 3737 drivers/infiniband/ulp/srp/ib_srp.c target->tl_retry_count = token; target 3756 drivers/infiniband/ulp/srp/ib_srp.c if (target->scsi_host->cmd_per_lun > target->scsi_host->can_queue target 3759 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->cmd_per_lun, target 3760 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->can_queue); target 3774 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target; target 3798 drivers/infiniband/ulp/srp/ib_srp.c target = host_to_target(target_host); target 3800 drivers/infiniband/ulp/srp/ib_srp.c target->net = kobj_ns_grab_current(KOBJ_NS_TYPE_NET); target 3801 drivers/infiniband/ulp/srp/ib_srp.c target->io_class = SRP_REV16A_IB_IO_CLASS; target 3802 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host = target_host; target 3803 drivers/infiniband/ulp/srp/ib_srp.c target->srp_host = host; target 3804 drivers/infiniband/ulp/srp/ib_srp.c target->lkey = host->srp_dev->pd->local_dma_lkey; target 3805 drivers/infiniband/ulp/srp/ib_srp.c target->global_rkey = host->srp_dev->global_rkey; target 3806 drivers/infiniband/ulp/srp/ib_srp.c target->cmd_sg_cnt = cmd_sg_entries; target 3807 drivers/infiniband/ulp/srp/ib_srp.c target->sg_tablesize = indirect_sg_entries ? 
: cmd_sg_entries; target 3808 drivers/infiniband/ulp/srp/ib_srp.c target->allow_ext_sg = allow_ext_sg; target 3809 drivers/infiniband/ulp/srp/ib_srp.c target->tl_retry_count = 7; target 3810 drivers/infiniband/ulp/srp/ib_srp.c target->queue_size = SRP_DEFAULT_QUEUE_SIZE; target 3816 drivers/infiniband/ulp/srp/ib_srp.c scsi_host_get(target->scsi_host); target 3822 drivers/infiniband/ulp/srp/ib_srp.c ret = srp_parse_options(target->net, buf, target); target 3826 drivers/infiniband/ulp/srp/ib_srp.c target->req_ring_size = target->queue_size - SRP_TSK_MGMT_SQ_SIZE; target 3828 drivers/infiniband/ulp/srp/ib_srp.c if (!srp_conn_unique(target->srp_host, target)) { target 3829 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) { target 3830 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_INFO, target->scsi_host, target 3832 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->id_ext), target 3833 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ioc_guid), target 3834 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.dst); target 3836 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_INFO, target->scsi_host, target 3838 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->id_ext), target 3839 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ioc_guid), target 3840 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->initiator_ext)); target 3846 drivers/infiniband/ulp/srp/ib_srp.c if (!srp_dev->has_fmr && !srp_dev->has_fr && !target->allow_ext_sg && target 3847 drivers/infiniband/ulp/srp/ib_srp.c target->cmd_sg_cnt < target->sg_tablesize) { target 3849 drivers/infiniband/ulp/srp/ib_srp.c target->sg_tablesize = target->cmd_sg_cnt; target 3873 drivers/infiniband/ulp/srp/ib_srp.c (target->scsi_host->max_sectors + 1 + target 3877 drivers/infiniband/ulp/srp/ib_srp.c (target->sg_tablesize + target 3882 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->max_sectors, srp_dev->max_pages_per_mr, srp_dev->mr_page_size, target 3886 drivers/infiniband/ulp/srp/ib_srp.c target_host->sg_tablesize = target->sg_tablesize; target 3887 drivers/infiniband/ulp/srp/ib_srp.c target->mr_pool_size = target->scsi_host->can_queue * mr_per_cmd; target 3888 drivers/infiniband/ulp/srp/ib_srp.c target->mr_per_cmd = mr_per_cmd; target 3889 drivers/infiniband/ulp/srp/ib_srp.c target->indirect_size = target->sg_tablesize * target 3891 drivers/infiniband/ulp/srp/ib_srp.c max_iu_len = srp_max_it_iu_len(target->cmd_sg_cnt, srp_use_imm_data); target 3893 drivers/infiniband/ulp/srp/ib_srp.c INIT_WORK(&target->tl_err_work, srp_tl_err_work); target 3894 drivers/infiniband/ulp/srp/ib_srp.c INIT_WORK(&target->remove_work, srp_remove_work); target 3895 drivers/infiniband/ulp/srp/ib_srp.c spin_lock_init(&target->lock); target 3896 drivers/infiniband/ulp/srp/ib_srp.c ret = rdma_query_gid(ibdev, host->port, 0, &target->sgid); target 3901 drivers/infiniband/ulp/srp/ib_srp.c target->ch_count = max_t(unsigned, num_online_nodes(), target 3906 drivers/infiniband/ulp/srp/ib_srp.c target->ch = kcalloc(target->ch_count, sizeof(*target->ch), target 3908 drivers/infiniband/ulp/srp/ib_srp.c if (!target->ch) target 3913 drivers/infiniband/ulp/srp/ib_srp.c const int ch_start = (node_idx * target->ch_count / target 3915 drivers/infiniband/ulp/srp/ib_srp.c const int ch_end = ((node_idx + 1) * target->ch_count / target 3928 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[ch_start + cpu_idx]; target 3929 drivers/infiniband/ulp/srp/ib_srp.c ch->target = target; target 3950 drivers/infiniband/ulp/srp/ib_srp.c 
if (target->using_rdma_cm) target 3952 drivers/infiniband/ulp/srp/ib_srp.c &target->rdma_cm.dst); target 3955 drivers/infiniband/ulp/srp/ib_srp.c target->ib_cm.orig_dgid.raw); target 3956 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_ERR, target->scsi_host, target 3959 drivers/infiniband/ulp/srp/ib_srp.c target->ch_count, dst); target 3963 drivers/infiniband/ulp/srp/ib_srp.c srp_free_ch_ib(target, ch); target 3964 drivers/infiniband/ulp/srp/ib_srp.c srp_free_req_data(target, ch); target 3965 drivers/infiniband/ulp/srp/ib_srp.c target->ch_count = ch - target->ch; target 3977 drivers/infiniband/ulp/srp/ib_srp.c target->scsi_host->nr_hw_queues = target->ch_count; target 3979 drivers/infiniband/ulp/srp/ib_srp.c ret = srp_add_target(host, target); target 3983 drivers/infiniband/ulp/srp/ib_srp.c if (target->state != SRP_TARGET_REMOVED) { target 3984 drivers/infiniband/ulp/srp/ib_srp.c if (target->using_rdma_cm) { target 3985 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, PFX target 3987 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->id_ext), target 3988 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ioc_guid), target 3989 drivers/infiniband/ulp/srp/ib_srp.c target->sgid.raw, &target->rdma_cm.dst); target 3991 drivers/infiniband/ulp/srp/ib_srp.c shost_printk(KERN_DEBUG, target->scsi_host, PFX target 3993 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->id_ext), target 3994 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ioc_guid), target 3995 drivers/infiniband/ulp/srp/ib_srp.c be16_to_cpu(target->ib_cm.pkey), target 3996 drivers/infiniband/ulp/srp/ib_srp.c be64_to_cpu(target->ib_cm.service_id), target 3997 drivers/infiniband/ulp/srp/ib_srp.c target->sgid.raw, target 3998 drivers/infiniband/ulp/srp/ib_srp.c target->ib_cm.orig_dgid.raw); target 4008 drivers/infiniband/ulp/srp/ib_srp.c scsi_host_put(target->scsi_host); target 4015 drivers/infiniband/ulp/srp/ib_srp.c if (target->state != SRP_TARGET_REMOVED) target 4016 drivers/infiniband/ulp/srp/ib_srp.c kobj_ns_drop(KOBJ_NS_TYPE_NET, target->net); target 4017 drivers/infiniband/ulp/srp/ib_srp.c scsi_host_put(target->scsi_host); target 4023 drivers/infiniband/ulp/srp/ib_srp.c srp_disconnect_target(target); target 4026 drivers/infiniband/ulp/srp/ib_srp.c for (i = 0; i < target->ch_count; i++) { target 4027 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[i]; target 4028 drivers/infiniband/ulp/srp/ib_srp.c srp_free_ch_ib(target, ch); target 4029 drivers/infiniband/ulp/srp/ib_srp.c srp_free_req_data(target, ch); target 4032 drivers/infiniband/ulp/srp/ib_srp.c kfree(target->ch); target 4202 drivers/infiniband/ulp/srp/ib_srp.c struct srp_target_port *target; target 4220 drivers/infiniband/ulp/srp/ib_srp.c list_for_each_entry(target, &host->target_list, list) target 4221 drivers/infiniband/ulp/srp/ib_srp.c srp_queue_remove_work(target); target 154 drivers/infiniband/ulp/srp/ib_srp.h struct srp_target_port *target ____cacheline_aligned_in_smp; target 134 drivers/input/joystick/gamecon.c unsigned char target) target 140 drivers/input/joystick/gamecon.c unsigned char data = (cmd >> i) & 1 ? target : 0; target 147 drivers/input/joystick/gamecon.c static void gc_n64_send_stop_bit(struct gc *gc, unsigned char target) target 153 drivers/input/joystick/gamecon.c unsigned char data = (GC_N64_STOP_BIT >> i) & 1 ? 
target : 0; target 257 drivers/input/joystick/gamecon.c unsigned char target = 1 << sdev->idx; /* select desired pin */ target 268 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_03, target); target 269 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_80, target); target 270 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_01, target); target 272 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_80, target); target 273 drivers/input/joystick/gamecon.c gc_n64_send_stop_bit(gc, target); target 278 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_03, target); target 279 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_c0, target); target 280 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, GC_N64_CMD_1b, target); target 282 drivers/input/joystick/gamecon.c gc_n64_send_command(gc, cmd, target); target 283 drivers/input/joystick/gamecon.c gc_n64_send_stop_bit(gc, target); target 861 drivers/iommu/amd_iommu.c u8 *target; target 863 drivers/iommu/amd_iommu.c target = iommu->cmd_buf + iommu->cmd_buf_tail; target 869 drivers/iommu/amd_iommu.c memcpy(target, cmd, sizeof(*cmd)); target 3792 drivers/iommu/intel-iommu.c enum dma_data_direction dir, enum dma_sync_target target) target 3803 drivers/iommu/intel-iommu.c swiotlb_tbl_sync_single(dev, tlb_addr, size, dir, target); target 598 drivers/irqchip/irq-gic-v3-its.c u64 target; target 601 drivers/irqchip/irq-gic-v3-its.c target = desc->its_vmapp_cmd.col->target_address + its->vlpi_redist_offset; target 606 drivers/irqchip/irq-gic-v3-its.c its_encode_target(cmd, target); target 665 drivers/irqchip/irq-gic-v3-its.c u64 target; target 667 drivers/irqchip/irq-gic-v3-its.c target = desc->its_vmovp_cmd.col->target_address + its->vlpi_redist_offset; target 672 drivers/irqchip/irq-gic-v3-its.c its_encode_target(cmd, target); target 2234 drivers/irqchip/irq-gic-v3-its.c u64 target; target 2255 drivers/irqchip/irq-gic-v3-its.c target = gic_data_rdist()->phys_base; target 2258 drivers/irqchip/irq-gic-v3-its.c target = gic_read_typer(gic_data_rdist_rd_base() + GICR_TYPER); target 2259 drivers/irqchip/irq-gic-v3-its.c target = GICR_TYPER_CPU_NUMBER(target) << 16; target 2263 drivers/irqchip/irq-gic-v3-its.c its->collections[cpu].target_address = target; target 91 drivers/isdn/hardware/mISDN/iohelper.h #define ASSIGN_FUNC_IPAC(typ, target) do { \ target 92 drivers/isdn/hardware/mISDN/iohelper.h ASSIGN_FUNC(typ, ISAC, target.isac); \ target 93 drivers/isdn/hardware/mISDN/iohelper.h ASSIGN_FUNC(typ, IPAC, target); \ target 37 drivers/leds/trigger/ledtrig-activity.c unsigned int target; target 130 drivers/leds/trigger/ledtrig-activity.c target = (cpus > 1) ? 
(100 / cpus) : 50; target 132 drivers/leds/trigger/ledtrig-activity.c if (usage < target) target 135 drivers/leds/trigger/ledtrig-activity.c 990 - 900 * usage / target; /* OFF */ target 138 drivers/leds/trigger/ledtrig-activity.c 10 + 80 * (usage - target) / (100 - target) : /* ON */ target 139 drivers/leds/trigger/ledtrig-activity.c 90 - 80 * (usage - target) / (100 - target); /* OFF */ target 66 drivers/macintosh/windfarm_fcu_controls.c s32 min, max, target; target 156 drivers/macintosh/windfarm_fcu_controls.c fan->target = value; target 213 drivers/macintosh/windfarm_fcu_controls.c fan->target = value; target 36 drivers/macintosh/windfarm_pid.c s32 target; target 67 drivers/macintosh/windfarm_pid.c target = (s32)((integ * (s64)st->param.gr + deriv * (s64)st->param.gd + target 70 drivers/macintosh/windfarm_pid.c target += st->target; target 71 drivers/macintosh/windfarm_pid.c target = max(target, st->param.min); target 72 drivers/macintosh/windfarm_pid.c target = min(target, st->param.max); target 73 drivers/macintosh/windfarm_pid.c st->target = target; target 75 drivers/macintosh/windfarm_pid.c return st->target; target 91 drivers/macintosh/windfarm_pid.c s32 error, target, sval, adj; target 137 drivers/macintosh/windfarm_pid.c target = st->target + (s32)((deriv + prop) >> 36); target 138 drivers/macintosh/windfarm_pid.c target = max(target, st->param.min); target 139 drivers/macintosh/windfarm_pid.c target = min(target, st->param.max); target 140 drivers/macintosh/windfarm_pid.c st->target = target; target 142 drivers/macintosh/windfarm_pid.c return st->target; target 37 drivers/macintosh/windfarm_pid.h s32 target; /* current target value */ target 73 drivers/macintosh/windfarm_pid.h s32 target; /* current target value */ target 266 drivers/macintosh/windfarm_pm112.c int i, t, target = 0; target 306 drivers/macintosh/windfarm_pm112.c target = t; target 311 drivers/macintosh/windfarm_pm112.c DBG_LOTS("fans = %d, t_max = %d.%03d\n", target, FIX32TOPRINT(t_max)); target 314 drivers/macintosh/windfarm_pm112.c if (target < (cpu_last_target - 20)) target 315 drivers/macintosh/windfarm_pm112.c target = cpu_last_target - 20; target 316 drivers/macintosh/windfarm_pm112.c cpu_last_target = target; target 318 drivers/macintosh/windfarm_pm112.c cpu_pid[cpu].target = target; target 329 drivers/macintosh/windfarm_pm112.c err = ct->ops->set_value(ct, target * cpu_fan_scale[i] / 100); target 262 drivers/macintosh/windfarm_pm72.c wf_control_get(cpu_rear_fans[cpu], &sp->target); target 264 drivers/macintosh/windfarm_pm72.c DBG_LOTS(" CPU%d: cur_target = %d RPM\n", cpu, sp->target); target 283 drivers/macintosh/windfarm_pm72.c DBG_LOTS(" CPU%d: target = %d RPM\n", cpu, sp->target); target 286 drivers/macintosh/windfarm_pm72.c err = wf_control_set(cpu_rear_fans[cpu], sp->target); target 295 drivers/macintosh/windfarm_pm72.c intake = (sp->target * CPU_INTAKE_SCALE) >> 16; target 318 drivers/macintosh/windfarm_pm72.c wf_control_get(cpu_rear_fans[0], &sp->target); target 320 drivers/macintosh/windfarm_pm72.c DBG_LOTS(" CPUs: cur_target = %d RPM\n", sp->target); target 351 drivers/macintosh/windfarm_pm72.c intake = (sp->target * CPU_INTAKE_SCALE) >> 16; target 360 drivers/macintosh/windfarm_pm72.c pump = (sp->target * wf_control_get_max(pump0)) / target 363 drivers/macintosh/windfarm_pm72.c DBG_LOTS(" CPUs: target = %d RPM\n", sp->target); target 368 drivers/macintosh/windfarm_pm72.c err = wf_control_set(cpu_rear_fans[cpu], sp->target); target 433 drivers/macintosh/windfarm_pm72.c cpu_pid[cpu].target = 1000; 
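
The windfarm_pid.c entries just above show the recurring pattern of these fan controllers: a PID term is accumulated onto the previous target and then clamped to the parameter range. The following is a minimal userspace sketch of that clamped-target update only; struct pid_state, its field names, the gain scaling (>> 16) and the error sign are hypothetical stand-ins, not the kernel's wf_pid_state layout or its exact arithmetic.

#include <stdint.h>

struct pid_state {
	int32_t gp, gi, gd;	/* proportional/integral/derivative gains */
	int32_t min, max;	/* clamp range for the computed target    */
	int32_t integ;		/* running integral of the error          */
	int32_t last_err;	/* previous error, for the derivative     */
	int32_t target;		/* current target value (e.g. fan RPM)    */
};

static int32_t pid_step(struct pid_state *st, int32_t sample, int32_t setpoint)
{
	int32_t err = sample - setpoint;
	int64_t adj;

	st->integ += err;
	adj = (int64_t)st->gp * err +
	      (int64_t)st->gi * st->integ +
	      (int64_t)st->gd * (err - st->last_err);
	st->last_err = err;

	/* accumulate onto the previous target, then clamp, as in the listing */
	st->target += (int32_t)(adj >> 16);
	if (st->target < st->min)
		st->target = st->min;
	if (st->target > st->max)
		st->target = st->max;
	return st->target;
}
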
target 480 drivers/macintosh/windfarm_pm72.c backside_pid.target = speed; target 560 drivers/macintosh/windfarm_pm72.c drives_pid.target = speed; target 362 drivers/macintosh/windfarm_pm81.c cputarget = wf_smu_cpu_fans ? wf_smu_cpu_fans->pid.target : 0; target 514 drivers/macintosh/windfarm_pm81.c systarget = wf_smu_sys_fans ? wf_smu_sys_fans->pid.target : 0; target 256 drivers/macintosh/windfarm_rm31.c wf_control_get(cpu_fans[cpu][0], &sp->target); target 275 drivers/macintosh/windfarm_rm31.c DBG_LOTS(" CPU%d: target = %d RPM\n", cpu, sp->target); target 278 drivers/macintosh/windfarm_rm31.c speed = max(sp->target, dimms_output_clamp); target 334 drivers/macintosh/windfarm_rm31.c cpu_pid[cpu].target = 4000; target 382 drivers/macintosh/windfarm_rm31.c backside_pid.target = speed; target 208 drivers/md/bcache/sysfs.c char target[20]; target 222 drivers/md/bcache/sysfs.c bch_hprint(target, dc->writeback_rate_target << 9); target 239 drivers/md/bcache/sysfs.c rate, dirty, target, proportional, target 83 drivers/md/bcache/writeback.c int64_t target = __calc_target_rate(dc); target 85 drivers/md/bcache/writeback.c int64_t error = dirty - target; target 119 drivers/md/bcache/writeback.c dc->writeback_rate_target = target; target 429 drivers/md/dm-cache-policy-smq.c unsigned target, level; target 437 drivers/md/dm-cache-policy-smq.c target = q->target_count[level]; target 442 drivers/md/dm-cache-policy-smq.c while (l->nr_elts < target) { target 457 drivers/md/dm-cache-policy-smq.c while (l->nr_elts > target) { target 48 drivers/md/dm-init.c static int __init dm_verify_target_type(const char *target) target 53 drivers/md/dm-init.c if (!strcmp(dm_allowed_targets[i], target)) target 1054 drivers/md/dm-integrity.c unsigned n_sectors, sector_t target, io_notify_fn fn, void *data) target 1061 drivers/md/dm-integrity.c BUG_ON((target | n_sectors | offset) & (unsigned)(ic->sectors_per_block - 1)); target 1082 drivers/md/dm-integrity.c io_loc.sector = target; target 201 drivers/md/dm-verity-fec.c u64 rsb, u64 target, unsigned block_offset, target 231 drivers/md/dm-verity-fec.c if (ileaved == target) target 20 drivers/md/dm-zoned-target.c struct dmz_target *target; target 32 drivers/md/dm-zoned-target.c struct dmz_target *target; target 84 drivers/md/dm-zoned-target.c bioctx->target->dev->flags |= DMZ_CHECK_BDEV; target 463 drivers/md/dm-zoned-target.c radix_tree_delete(&cw->target->chunk_rxtree, cw->chunk); target 474 drivers/md/dm-zoned-target.c struct dmz_target *dmz = cw->target; target 548 drivers/md/dm-zoned-target.c cw->target = dmz; target 647 drivers/md/dm-zoned-target.c bioctx->target = dmz; target 156 drivers/md/md-bitmap.c sector_t target; target 164 drivers/md/md-bitmap.c target = offset + index * (PAGE_SIZE/512); target 166 drivers/md/md-bitmap.c if (sync_page_io(rdev, target, target 1565 drivers/md/raid5-cache.c unsigned long target; target 1571 drivers/md/raid5-cache.c target = log->reclaim_target; target 1572 drivers/md/raid5-cache.c if (new < target) target 1574 drivers/md/raid5-cache.c } while (cmpxchg(&log->reclaim_target, target, new) != target); target 892 drivers/md/raid5.c static void dispatch_defer_bios(struct r5conf *conf, int target, target 921 drivers/md/raid5.c if (cnt >= target) target 925 drivers/md/raid5.c BUG_ON(conf->pending_data_cnt < 0 || cnt < target); target 1360 drivers/md/raid5.c static void mark_target_uptodate(struct stripe_head *sh, int target) target 1364 drivers/md/raid5.c if (target < 0) target 1367 drivers/md/raid5.c tgt = &sh->dev[target]; target 1381 
drivers/md/raid5.c mark_target_uptodate(sh, sh->ops.target); target 1409 drivers/md/raid5.c int target = sh->ops.target; target 1410 drivers/md/raid5.c struct r5dev *tgt = &sh->dev[target]; target 1420 drivers/md/raid5.c __func__, (unsigned long long)sh->sector, target); target 1424 drivers/md/raid5.c if (i != target) target 1491 drivers/md/raid5.c int target; target 1501 drivers/md/raid5.c if (sh->ops.target < 0) target 1502 drivers/md/raid5.c target = sh->ops.target2; target 1504 drivers/md/raid5.c target = sh->ops.target; target 1508 drivers/md/raid5.c BUG_ON(target < 0); target 1510 drivers/md/raid5.c __func__, (unsigned long long)sh->sector, target); target 1512 drivers/md/raid5.c tgt = &sh->dev[target]; target 1518 drivers/md/raid5.c if (target == qd_idx) { target 1530 drivers/md/raid5.c if (i == target || i == qd_idx) target 1551 drivers/md/raid5.c int target = sh->ops.target; target 1553 drivers/md/raid5.c struct r5dev *tgt = &sh->dev[target]; target 1561 drivers/md/raid5.c __func__, (unsigned long long)sh->sector, target, target2); target 1562 drivers/md/raid5.c BUG_ON(target < 0 || target2 < 0); target 1578 drivers/md/raid5.c if (i == target) target 1608 drivers/md/raid5.c if (target == qd_idx) target 1611 drivers/md/raid5.c data_target = target; target 2072 drivers/md/raid5.c if (sh->ops.target2 < 0 || sh->ops.target < 0) target 3658 drivers/md/raid5.c sh->ops.target = disk_idx; target 3689 drivers/md/raid5.c sh->ops.target = disk_idx; target 4110 drivers/md/raid5.c sh->ops.target = sh->pd_idx; target 4270 drivers/md/raid5.c int *target = &sh->ops.target; target 4272 drivers/md/raid5.c sh->ops.target = -1; target 4280 drivers/md/raid5.c *target = pd_idx; target 4281 drivers/md/raid5.c target = &sh->ops.target2; target 4287 drivers/md/raid5.c *target = qd_idx; target 246 drivers/md/raid5.h int target, target2; target 152 drivers/media/common/b2c2/flexcop-common.h flexcop_sram_dest_target_t target); target 32 drivers/media/common/b2c2/flexcop-sram.c flexcop_sram_dest_target_t target) target 37 drivers/media/common/b2c2/flexcop-sram.c if (fc->rev != FLEXCOP_III && target == FC_SRAM_DEST_TARGET_FC3_CA) { target 41 drivers/media/common/b2c2/flexcop-sram.c deb_sram("sram dest: %x target: %x\n", dest, target); target 44 drivers/media/common/b2c2/flexcop-sram.c v.sram_dest_reg_714.NET_Dest = target; target 46 drivers/media/common/b2c2/flexcop-sram.c v.sram_dest_reg_714.CAI_Dest = target; target 48 drivers/media/common/b2c2/flexcop-sram.c v.sram_dest_reg_714.CAO_Dest = target; target 50 drivers/media/common/b2c2/flexcop-sram.c v.sram_dest_reg_714.MEDIA_Dest = target; target 1923 drivers/media/i2c/adv7604.c if (sel->target > V4L2_SEL_TGT_CROP_BOUNDS) target 134 drivers/media/i2c/ak881x.c switch (sel->target) { target 624 drivers/media/i2c/imx214.c if (sel->target != V4L2_SEL_TGT_CROP) target 1002 drivers/media/i2c/imx274.c if (sel->target == V4L2_SEL_TGT_CROP_BOUNDS) { target 1020 drivers/media/i2c/imx274.c switch (sel->target) { target 1115 drivers/media/i2c/imx274.c if (sel->target == V4L2_SEL_TGT_CROP) target 1118 drivers/media/i2c/imx274.c if (sel->target == V4L2_SEL_TGT_COMPOSE) { target 264 drivers/media/i2c/mt9m001.c sel->target != V4L2_SEL_TGT_CROP) target 306 drivers/media/i2c/mt9m001.c switch (sel->target) { target 358 drivers/media/i2c/mt9m001.c .target = V4L2_SEL_TGT_CROP, target 418 drivers/media/i2c/mt9m032.c if (sel->target != V4L2_SEL_TGT_CROP) target 438 drivers/media/i2c/mt9m032.c if (sel->target != V4L2_SEL_TGT_CROP) target 462 drivers/media/i2c/mt9m111.c sel->target != 
V4L2_SEL_TGT_CROP) target 505 drivers/media/i2c/mt9m111.c switch (sel->target) { target 587 drivers/media/i2c/mt9p031.c if (sel->target != V4L2_SEL_TGT_CROP) target 603 drivers/media/i2c/mt9p031.c if (sel->target != V4L2_SEL_TGT_CROP) target 418 drivers/media/i2c/mt9t001.c if (sel->target != V4L2_SEL_TGT_CROP) target 434 drivers/media/i2c/mt9t001.c if (sel->target != V4L2_SEL_TGT_CROP) target 884 drivers/media/i2c/mt9t112.c switch (sel->target) { target 908 drivers/media/i2c/mt9t112.c sel->target != V4L2_SEL_TGT_CROP) target 544 drivers/media/i2c/mt9v032.c if (sel->target != V4L2_SEL_TGT_CROP) target 560 drivers/media/i2c/mt9v032.c if (sel->target != V4L2_SEL_TGT_CROP) target 1046 drivers/media/i2c/ov2640.c switch (sel->target) { target 1507 drivers/media/i2c/ov5640.c static int ov5640_set_ae_target(struct ov5640_dev *sensor, int target) target 1513 drivers/media/i2c/ov5640.c sensor->ae_low = target * 23 / 25; /* 0.92 */ target 1514 drivers/media/i2c/ov5640.c sensor->ae_high = target * 27 / 25; /* 1.08 */ target 978 drivers/media/i2c/ov5645.c if (sel->target != V4L2_SEL_TGT_CROP) target 458 drivers/media/i2c/ov6650.c switch (sel->target) { target 482 drivers/media/i2c/ov6650.c sel->target != V4L2_SEL_TGT_CROP) target 574 drivers/media/i2c/ov6650.c .target = V4L2_SEL_TGT_CROP, target 1123 drivers/media/i2c/ov7251.c if (sel->target != V4L2_SEL_TGT_CROP) target 1149 drivers/media/i2c/ov772x.c switch (sel->target) { target 576 drivers/media/i2c/ov9640.c switch (sel->target) { target 554 drivers/media/i2c/rj54n1cb0c.c sel->target != V4L2_SEL_TGT_CROP) target 590 drivers/media/i2c/rj54n1cb0c.c switch (sel->target) { target 1348 drivers/media/i2c/s5k5baf.c static enum selection_rect s5k5baf_get_sel_rect(u32 pad, u32 target) target 1350 drivers/media/i2c/s5k5baf.c switch (target) { target 1364 drivers/media/i2c/s5k5baf.c static int s5k5baf_is_bound_target(u32 target) target 1366 drivers/media/i2c/s5k5baf.c return target == V4L2_SEL_TGT_CROP_BOUNDS || target 1367 drivers/media/i2c/s5k5baf.c target == V4L2_SEL_TGT_COMPOSE_BOUNDS; target 1377 drivers/media/i2c/s5k5baf.c rtype = s5k5baf_get_sel_rect(sel->pad, sel->target); target 1411 drivers/media/i2c/s5k5baf.c if (s5k5baf_is_bound_target(sel->target)) { target 1470 drivers/media/i2c/s5k5baf.c rtype = s5k5baf_get_sel_rect(sel->pad, sel->target); target 1471 drivers/media/i2c/s5k5baf.c if (rtype == R_INVALID || s5k5baf_is_bound_target(sel->target)) target 1172 drivers/media/i2c/s5k6aa.c if (sel->target != V4L2_SEL_TGT_CROP) target 1197 drivers/media/i2c/s5k6aa.c if (sel->target != V4L2_SEL_TGT_CROP) target 1695 drivers/media/i2c/smiapp/smiapp-core.c int target) target 1703 drivers/media/i2c/smiapp/smiapp-core.c switch (target) { target 2088 drivers/media/i2c/smiapp/smiapp-core.c switch (sel->target) { target 2212 drivers/media/i2c/smiapp/smiapp-core.c switch (sel->target) { target 2272 drivers/media/i2c/smiapp/smiapp-core.c switch (sel->target) { target 1023 drivers/media/i2c/tvp5150.c sel->target != V4L2_SEL_TGT_CROP) target 1083 drivers/media/i2c/tvp5150.c switch (sel->target) { target 732 drivers/media/i2c/tw9910.c if (sel->target > V4L2_SEL_TGT_CROP_BOUNDS) target 2765 drivers/media/pci/bt8xx/bttv-driver.c switch (sel->target) { target 2803 drivers/media/pci/bt8xx/bttv-driver.c if (sel->target != V4L2_SEL_TGT_CROP) target 1106 drivers/media/pci/cobalt/cobalt-v4l2.c switch (sel->target) { target 452 drivers/media/pci/cx18/cx18-ioctl.c switch (sel->target) { target 678 drivers/media/pci/cx23885/cx23885-video.c switch (sel->target) { target 844 
drivers/media/pci/ivtv/ivtv-ioctl.c if (sel->target != V4L2_SEL_TGT_COMPOSE) target 884 drivers/media/pci/ivtv/ivtv-ioctl.c switch (sel->target) { target 900 drivers/media/pci/ivtv/ivtv-ioctl.c switch (sel->target) { target 1628 drivers/media/pci/saa7134/saa7134-video.c switch (sel->target) { target 1654 drivers/media/pci/saa7134/saa7134-video.c if (sel->target != V4L2_SEL_TGT_CROP) target 2095 drivers/media/platform/am437x/am437x-vpfe.c switch (s->target) { target 2143 drivers/media/platform/am437x/am437x-vpfe.c s->target != V4L2_SEL_TGT_CROP) target 930 drivers/media/platform/coda/coda-common.c switch (s->target) { target 963 drivers/media/platform/coda/coda-common.c switch (s->target) { target 656 drivers/media/platform/davinci/vpbe_display.c sel->target != V4L2_SEL_TGT_CROP) target 725 drivers/media/platform/davinci/vpbe_display.c switch (sel->target) { target 1543 drivers/media/platform/davinci/vpfe_capture.c switch (sel->target) { target 1568 drivers/media/platform/davinci/vpfe_capture.c sel->target != V4L2_SEL_TGT_CROP) target 463 drivers/media/platform/exynos-gsc/gsc-m2m.c switch (s->target) { target 513 drivers/media/platform/exynos-gsc/gsc-m2m.c switch (s->target) { target 651 drivers/media/platform/exynos4-is/fimc-capture.c int target) target 669 drivers/media/platform/exynos4-is/fimc-capture.c if (target == V4L2_SEL_TGT_COMPOSE) { target 699 drivers/media/platform/exynos4-is/fimc-capture.c if (target == V4L2_SEL_TGT_COMPOSE) { target 716 drivers/media/platform/exynos4-is/fimc-capture.c target, r->left, r->top, r->width, r->height, target 1275 drivers/media/platform/exynos4-is/fimc-capture.c switch (s->target) { target 1327 drivers/media/platform/exynos4-is/fimc-capture.c if (s->target == V4L2_SEL_TGT_COMPOSE) target 1329 drivers/media/platform/exynos4-is/fimc-capture.c else if (s->target == V4L2_SEL_TGT_CROP) target 1334 drivers/media/platform/exynos4-is/fimc-capture.c fimc_capture_try_selection(ctx, &rect, s->target); target 1611 drivers/media/platform/exynos4-is/fimc-capture.c switch (sel->target) { target 1669 drivers/media/platform/exynos4-is/fimc-capture.c switch (sel->target) { target 1688 drivers/media/platform/exynos4-is/fimc-capture.c if (sel->target == V4L2_SEL_TGT_COMPOSE) target 1693 drivers/media/platform/exynos4-is/fimc-capture.c dbg("target %#x: (%d,%d)/%dx%d", sel->target, r->left, r->top, target 294 drivers/media/platform/exynos4-is/fimc-is-param.c isp->aa.target = ISP_AA_TARGET_AF | ISP_AA_TARGET_AE | target 731 drivers/media/platform/exynos4-is/fimc-is-param.c isp->aa.target = ISP_AA_TARGET_AE | ISP_AA_TARGET_AWB; target 532 drivers/media/platform/exynos4-is/fimc-is-param.h u32 target; target 475 drivers/media/platform/exynos4-is/fimc-isp.c isp->aa.target = ISP_AA_TARGET_AE; target 486 drivers/media/platform/exynos4-is/fimc-isp.c isp->aa.target = ISP_AA_TARGET_AE; target 893 drivers/media/platform/exynos4-is/fimc-lite.c switch (sel->target) { target 919 drivers/media/platform/exynos4-is/fimc-lite.c sel->target != V4L2_SEL_TGT_COMPOSE) target 1136 drivers/media/platform/exynos4-is/fimc-lite.c if ((sel->target != V4L2_SEL_TGT_CROP && target 1137 drivers/media/platform/exynos4-is/fimc-lite.c sel->target != V4L2_SEL_TGT_CROP_BOUNDS) || target 1147 drivers/media/platform/exynos4-is/fimc-lite.c if (sel->target == V4L2_SEL_TGT_CROP) { target 1172 drivers/media/platform/exynos4-is/fimc-lite.c if (sel->target != V4L2_SEL_TGT_CROP || sel->pad != FLITE_SD_PAD_SINK) target 390 drivers/media/platform/exynos4-is/fimc-m2m.c switch (s->target) { target 407 
drivers/media/platform/exynos4-is/fimc-m2m.c switch (s->target) { target 445 drivers/media/platform/exynos4-is/fimc-m2m.c if (s->target != V4L2_SEL_TGT_COMPOSE) target 449 drivers/media/platform/exynos4-is/fimc-m2m.c if (s->target != V4L2_SEL_TGT_CROP) target 462 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c switch (s->target) { target 491 drivers/media/platform/mtk-jpeg/mtk_jpeg_core.c switch (s->target) { target 783 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static inline bool mtk_mdp_is_target_compose(u32 target) target 785 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (target == V4L2_SEL_TGT_COMPOSE_DEFAULT target 786 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c || target == V4L2_SEL_TGT_COMPOSE_BOUNDS target 787 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c || target == V4L2_SEL_TGT_COMPOSE) target 792 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c static inline bool mtk_mdp_is_target_crop(u32 target) target 794 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (target == V4L2_SEL_TGT_CROP_DEFAULT target 795 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c || target == V4L2_SEL_TGT_CROP_BOUNDS target 796 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c || target == V4L2_SEL_TGT_CROP) target 809 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (mtk_mdp_is_target_compose(s->target)) target 812 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (mtk_mdp_is_target_crop(s->target)) target 817 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c s->target); target 823 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c switch (s->target) { target 879 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (s->target == V4L2_SEL_TGT_COMPOSE) target 882 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (s->target == V4L2_SEL_TGT_CROP) target 887 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c s->target); target 896 drivers/media/platform/mtk-mdp/mtk_mdp_m2m.c if (mtk_mdp_is_target_crop(s->target)) target 768 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c switch (s->target) { target 814 drivers/media/platform/mtk-vcodec/mtk_vcodec_dec.c switch (s->target) { target 633 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c switch (s->target) { target 667 drivers/media/platform/mtk-vcodec/mtk_vcodec_enc.c switch (s->target) { target 797 drivers/media/platform/omap/omap_vout.c switch (sel->target) { target 827 drivers/media/platform/omap/omap_vout.c if (sel->target != V4L2_SEL_TGT_CROP) target 2229 drivers/media/platform/omap3isp/ispccdc.c switch (sel->target) { target 2268 drivers/media/platform/omap3isp/ispccdc.c if (sel->target != V4L2_SEL_TGT_CROP || target 1938 drivers/media/platform/omap3isp/isppreview.c switch (sel->target) { target 1978 drivers/media/platform/omap3isp/isppreview.c if (sel->target != V4L2_SEL_TGT_CROP || target 1239 drivers/media/platform/omap3isp/ispresizer.c switch (sel->target) { target 1286 drivers/media/platform/omap3isp/ispresizer.c if (sel->target != V4L2_SEL_TGT_CROP || target 775 drivers/media/platform/omap3isp/ispvideo.c .target = sel->target, target 780 drivers/media/platform/omap3isp/ispvideo.c switch (sel->target) { target 831 drivers/media/platform/omap3isp/ispvideo.c .target = sel->target, target 838 drivers/media/platform/omap3isp/ispvideo.c switch (sel->target) { target 1803 drivers/media/platform/qcom/camss/camss-vfe.c sel.target = V4L2_SEL_TGT_COMPOSE; target 1835 drivers/media/platform/qcom/camss/camss-vfe.c switch (sel->target) { target 1859 drivers/media/platform/qcom/camss/camss-vfe.c switch (sel->target) { target 1903 drivers/media/platform/qcom/camss/camss-vfe.c if (sel->target == 
V4L2_SEL_TGT_COMPOSE && target 1917 drivers/media/platform/qcom/camss/camss-vfe.c crop.target = V4L2_SEL_TGT_CROP; target 1920 drivers/media/platform/qcom/camss/camss-vfe.c } else if (sel->target == V4L2_SEL_TGT_CROP && target 337 drivers/media/platform/qcom/venus/vdec.c switch (s->target) { target 452 drivers/media/platform/qcom/venus/venc.c switch (s->target) { target 480 drivers/media/platform/qcom/venus/venc.c switch (s->target) { target 322 drivers/media/platform/rcar-vin/rcar-v4l2.c switch (s->target) { target 365 drivers/media/platform/rcar-vin/rcar-v4l2.c switch (s->target) { target 572 drivers/media/platform/rockchip/rga/rga.c switch (s->target) { target 621 drivers/media/platform/rockchip/rga/rga.c switch (s->target) { target 985 drivers/media/platform/s3c-camif/camif-capture.c switch (sel->target) { target 1023 drivers/media/platform/s3c-camif/camif-capture.c sel->target != V4L2_SEL_TGT_COMPOSE) target 1035 drivers/media/platform/s3c-camif/camif-capture.c sel->type, sel->target, sel->flags, target 1355 drivers/media/platform/s3c-camif/camif-capture.c if ((sel->target != V4L2_SEL_TGT_CROP && target 1356 drivers/media/platform/s3c-camif/camif-capture.c sel->target != V4L2_SEL_TGT_CROP_BOUNDS) || target 1367 drivers/media/platform/s3c-camif/camif-capture.c if (sel->target == V4L2_SEL_TGT_CROP) { target 1442 drivers/media/platform/s3c-camif/camif-capture.c if (sel->target != V4L2_SEL_TGT_CROP || sel->pad != CAMIF_SD_PAD_SINK) target 407 drivers/media/platform/s5p-g2d/g2d.c switch (s->target) { target 424 drivers/media/platform/s5p-g2d/g2d.c switch (s->target) { target 459 drivers/media/platform/s5p-g2d/g2d.c if (s->target != V4L2_SEL_TGT_COMPOSE) target 462 drivers/media/platform/s5p-g2d/g2d.c if (s->target != V4L2_SEL_TGT_CROP) target 1811 drivers/media/platform/s5p-jpeg/jpeg-core.c switch (s->target) { target 1848 drivers/media/platform/s5p-jpeg/jpeg-core.c if (s->target == V4L2_SEL_TGT_COMPOSE) { target 1853 drivers/media/platform/s5p-jpeg/jpeg-core.c } else if (s->target == V4L2_SEL_TGT_CROP) { target 789 drivers/media/platform/s5p-mfc/s5p_mfc_dec.c switch (s->target) { target 902 drivers/media/platform/sh_vou.c switch (sel->target) { target 930 drivers/media/platform/sh_vou.c .target = V4L2_SEL_TGT_COMPOSE, target 945 drivers/media/platform/sh_vou.c sel->target != V4L2_SEL_TGT_COMPOSE) target 857 drivers/media/platform/sti/bdisp/bdisp-v4l2.c switch (s->target) { target 877 drivers/media/platform/sti/bdisp/bdisp-v4l2.c switch (s->target) { target 930 drivers/media/platform/sti/bdisp/bdisp-v4l2.c (s->target == V4L2_SEL_TGT_CROP)) target 934 drivers/media/platform/sti/bdisp/bdisp-v4l2.c (s->target == V4L2_SEL_TGT_COMPOSE)) target 742 drivers/media/platform/sti/delta/delta-v4l2.c switch (s->target) { target 1180 drivers/media/platform/stm32/stm32-dcmi.c .target = V4L2_SEL_TGT_CROP_BOUNDS, target 1247 drivers/media/platform/stm32/stm32-dcmi.c switch (s->target) { target 1278 drivers/media/platform/stm32/stm32-dcmi.c s->target != V4L2_SEL_TGT_CROP) target 1804 drivers/media/platform/ti-vpe/vpe.c switch (s->target) { target 1870 drivers/media/platform/ti-vpe/vpe.c switch (s->target) { target 1160 drivers/media/platform/vicodec/vicodec-core.c switch (s->target) { target 1176 drivers/media/platform/vicodec/vicodec-core.c switch (s->target) { target 1208 drivers/media/platform/vicodec/vicodec-core.c if (!ctx->is_enc || s->target != V4L2_SEL_TGT_CROP) target 848 drivers/media/platform/vivid/vivid-vid-cap.c switch (sel->target) { target 896 drivers/media/platform/vivid/vivid-vid-cap.c 
switch (s->target) { target 646 drivers/media/platform/vivid/vivid-vid-out.c switch (sel->target) { target 692 drivers/media/platform/vivid/vivid-vid-out.c switch (s->target) { target 193 drivers/media/platform/vsp1/vsp1_brx.c switch (sel->target) { target 230 drivers/media/platform/vsp1/vsp1_brx.c if (sel->target != V4L2_SEL_TGT_COMPOSE) target 153 drivers/media/platform/vsp1/vsp1_drm.c sel.target = V4L2_SEL_TGT_CROP; target 208 drivers/media/platform/vsp1/vsp1_drm.c sel.target = V4L2_SEL_TGT_COMPOSE; target 162 drivers/media/platform/vsp1/vsp1_entity.c unsigned int pad, unsigned int target) target 164 drivers/media/platform/vsp1/vsp1_entity.c switch (target) { target 150 drivers/media/platform/vsp1/vsp1_entity.h unsigned int pad, unsigned int target); target 220 drivers/media/platform/vsp1/vsp1_histo.c switch (sel->target) { target 245 drivers/media/platform/vsp1/vsp1_histo.c sel->pad, sel->target); target 359 drivers/media/platform/vsp1/vsp1_histo.c if (sel->target == V4L2_SEL_TGT_CROP) target 361 drivers/media/platform/vsp1/vsp1_histo.c else if (sel->target == V4L2_SEL_TGT_COMPOSE) target 154 drivers/media/platform/vsp1/vsp1_rwpf.c switch (sel->target) { target 195 drivers/media/platform/vsp1/vsp1_rwpf.c if (sel->target != V4L2_SEL_TGT_CROP) target 103 drivers/media/platform/vsp1/vsp1_uif.c switch (sel->target) { target 116 drivers/media/platform/vsp1/vsp1_uif.c sel->pad, sel->target); target 140 drivers/media/platform/vsp1/vsp1_uif.c sel->target != V4L2_SEL_TGT_CROP) target 176 drivers/media/rc/serial_ir.c static void send_pulse_irdeo(unsigned int length, ktime_t target) target 216 drivers/media/rc/serial_ir.c ktime_t now, target = ktime_add_us(edge, length); target 232 drivers/media/rc/serial_ir.c if (ktime_compare(now, target) >= 0) target 241 drivers/media/rc/serial_ir.c if (ktime_compare(now, target) >= 0) target 946 drivers/media/tuners/tda18271-maps.c u8 target; target 951 drivers/media/tuners/tda18271-maps.c { .rfmax = 46000, .target = 0x04, .limit = 1800 }, target 952 drivers/media/tuners/tda18271-maps.c { .rfmax = 52200, .target = 0x0a, .limit = 1500 }, target 953 drivers/media/tuners/tda18271-maps.c { .rfmax = 70100, .target = 0x01, .limit = 4000 }, target 954 drivers/media/tuners/tda18271-maps.c { .rfmax = 136800, .target = 0x18, .limit = 4000 }, target 955 drivers/media/tuners/tda18271-maps.c { .rfmax = 156700, .target = 0x18, .limit = 4000 }, target 956 drivers/media/tuners/tda18271-maps.c { .rfmax = 186250, .target = 0x0a, .limit = 4000 }, target 957 drivers/media/tuners/tda18271-maps.c { .rfmax = 230000, .target = 0x0a, .limit = 4000 }, target 958 drivers/media/tuners/tda18271-maps.c { .rfmax = 345000, .target = 0x18, .limit = 4000 }, target 959 drivers/media/tuners/tda18271-maps.c { .rfmax = 426000, .target = 0x0e, .limit = 4000 }, target 960 drivers/media/tuners/tda18271-maps.c { .rfmax = 489500, .target = 0x1e, .limit = 4000 }, target 961 drivers/media/tuners/tda18271-maps.c { .rfmax = 697500, .target = 0x32, .limit = 4000 }, target 962 drivers/media/tuners/tda18271-maps.c { .rfmax = 842000, .target = 0x3a, .limit = 4000 }, target 963 drivers/media/tuners/tda18271-maps.c { .rfmax = 0, .target = 0x00, .limit = 0 }, /* end */ target 977 drivers/media/tuners/tda18271-maps.c *cid_target = tda18271_cid_target[i].target; target 981 drivers/media/tuners/tda18271-maps.c tda18271_cid_target[i].target, tda18271_cid_target[i].limit); target 1628 drivers/media/usb/au0828/au0828-video.c switch (s->target) { target 457 drivers/media/usb/cpia2/cpia2_v4l.c switch (s->target) { 
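
A large share of the media entries above are V4L2 selection handlers that dispatch on the requested selection target. As a rough illustration of that pattern (not any one of the listed drivers), a capture driver's g_selection callback typically looks like the sketch below; my_g_selection, my_crop and MY_WIDTH/MY_HEIGHT are made-up names, while the v4l2_selection fields and V4L2_SEL_TGT_* constants are the standard API. A driver would then point .vidioc_g_selection in its v4l2_ioctl_ops at such a handler.

#include <linux/errno.h>
#include <linux/videodev2.h>
#include <media/v4l2-ioctl.h>

#define MY_WIDTH	1920
#define MY_HEIGHT	1080

static struct v4l2_rect my_crop;	/* hypothetical active crop rectangle */

static int my_g_selection(struct file *file, void *fh,
			  struct v4l2_selection *s)
{
	if (s->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
		return -EINVAL;

	switch (s->target) {
	case V4L2_SEL_TGT_CROP:
		s->r = my_crop;			/* current crop rectangle  */
		return 0;
	case V4L2_SEL_TGT_CROP_DEFAULT:
	case V4L2_SEL_TGT_CROP_BOUNDS:
		s->r.left = 0;			/* full frame as default   */
		s->r.top = 0;
		s->r.width = MY_WIDTH;
		s->r.height = MY_HEIGHT;
		return 0;
	default:
		return -EINVAL;		/* unsupported selection target */
	}
}
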
target 1520 drivers/media/usb/cx231xx/cx231xx-417.c switch (s->target) { target 1497 drivers/media/usb/cx231xx/cx231xx-video.c switch (s->target) { target 1655 drivers/media/usb/dvb-usb/dib0700_devices.c u16 target, ltgain, rf_gain_limit; target 1702 drivers/media/usb/dvb-usb/dib0700_devices.c target = (dib0090_get_wbd_target(fe) * 8 * 18 / 33 + 1) / 2; target 1703 drivers/media/usb/dvb-usb/dib0700_devices.c state->dib8000_ops.set_wbd_ref(fe, target); target 2123 drivers/media/usb/dvb-usb/dib0700_devices.c u16 target; target 2134 drivers/media/usb/dvb-usb/dib0700_devices.c target = (dib0090_get_wbd_target(fe) * 8 + 1) / 2; target 2135 drivers/media/usb/dvb-usb/dib0700_devices.c state->dib8000_ops.set_wbd_ref(fe, target); target 2636 drivers/media/usb/dvb-usb/dib0700_devices.c u16 target; target 2646 drivers/media/usb/dvb-usb/dib0700_devices.c target = (dib0090_get_wbd_target(fe) * 8 + 1) / 2; target 2647 drivers/media/usb/dvb-usb/dib0700_devices.c state->dib7000p_ops.set_wbd_ref(fe, target); target 47 drivers/media/usb/gspca/stv06xx/stv06xx_pb0100.c struct v4l2_ctrl *target; target 145 drivers/media/usb/gspca/stv06xx/stv06xx_pb0100.c ctrls->target = v4l2_ctrl_new_custom(hdl, &autogain_target, NULL); target 715 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c switch (sel->target) { target 760 drivers/media/usb/pvrusb2/pvrusb2-v4l2.c sel->target != V4L2_SEL_TGT_CROP) target 1116 drivers/media/usb/uvc/uvc_v4l2.c switch (sel->target) { target 635 drivers/media/v4l2-core/v4l2-ioctl.c p->target, p->flags, target 2305 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_COMPOSE; target 2307 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_CROP; target 2310 drivers/media/v4l2-core/v4l2-ioctl.c s.target = s.target == V4L2_SEL_TGT_COMPOSE ? target 2335 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_COMPOSE; target 2337 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_CROP; target 2340 drivers/media/v4l2-core/v4l2-ioctl.c s.target = s.target == V4L2_SEL_TGT_COMPOSE ? target 2385 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_COMPOSE_BOUNDS; target 2387 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_CROP_BOUNDS; target 2390 drivers/media/v4l2-core/v4l2-ioctl.c s.target = s.target == V4L2_SEL_TGT_COMPOSE_BOUNDS ? 
target 2399 drivers/media/v4l2-core/v4l2-ioctl.c if (s.target == V4L2_SEL_TGT_COMPOSE_BOUNDS) target 2400 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_COMPOSE_DEFAULT; target 2402 drivers/media/v4l2-core/v4l2-ioctl.c s.target = V4L2_SEL_TGT_CROP_DEFAULT; target 469 drivers/media/v4l2-core/v4l2-subdev.c sel.target = V4L2_SEL_TGT_CROP; target 487 drivers/media/v4l2-core/v4l2-subdev.c sel.target = V4L2_SEL_TGT_CROP; target 108 drivers/message/fusion/mptspi.c mptspi_setTargetNegoParms(MPT_SCSI_HOST *hd, VirtTarget *target, target 113 drivers/message/fusion/mptspi.c int id = (int) target->id; target 121 drivers/message/fusion/mptspi.c target->negoFlags = pspi_data->noQas; target 127 drivers/message/fusion/mptspi.c target->tflags &= ~MPT_TARGET_FLAGS_Q_YES; target 151 drivers/message/fusion/mptspi.c target->negoFlags |= MPT_TAPE_NEGO_IDP; target 162 drivers/message/fusion/mptspi.c if (target->raidVolume == 1) target 171 drivers/message/fusion/mptspi.c target->tflags &= ~MPT_TARGET_FLAGS_Q_YES; target 209 drivers/message/fusion/mptspi.c target->minSyncFactor = factor; target 210 drivers/message/fusion/mptspi.c target->maxOffset = offset; target 211 drivers/message/fusion/mptspi.c target->maxWidth = width; target 217 drivers/message/fusion/mptspi.c target->tflags |= MPT_TARGET_FLAGS_VALID_NEGO; target 222 drivers/message/fusion/mptspi.c target->negoFlags |= MPT_TARGET_NO_NEGO_WIDE; target 225 drivers/message/fusion/mptspi.c target->negoFlags |= MPT_TARGET_NO_NEGO_SYNC; target 232 drivers/message/fusion/mptspi.c target->negoFlags |= MPT_TARGET_NO_NEGO_QAS; target 249 drivers/mfd/arizona-core.c unsigned int mask, unsigned int target) target 258 drivers/mfd/arizona-core.c if ((val & mask) == target) target 889 drivers/misc/apds990x.c static ssize_t apds990x_set_lux_thresh(struct apds990x_chip *chip, u32 *target, target 903 drivers/misc/apds990x.c *target = thresh; target 1046 drivers/misc/bh1770glc.c static ssize_t bh1770_set_lux_thresh(struct bh1770_chip *chip, u16 *target, target 1060 drivers/misc/bh1770glc.c *target = thresh; target 15 drivers/misc/ibmasm/i2o.h u8 target; target 29 drivers/misc/ibmasm/i2o.h .target = 0x00, \ target 183 drivers/misc/lkdtm/bugs.c void *target[2] = { }; target 184 drivers/misc/lkdtm/bugs.c void *redirection = ⌖ target 206 drivers/misc/lkdtm/bugs.c if (target[0] == NULL && target[1] == NULL) target 216 drivers/misc/lkdtm/bugs.c void *target[2] = { }; target 217 drivers/misc/lkdtm/bugs.c void *redirection = ⌖ target 231 drivers/misc/lkdtm/bugs.c if (target[0] == NULL && target[1] == NULL) target 288 drivers/misc/vmw_balloon.c unsigned long target; target 489 drivers/misc/vmw_balloon.c WRITE_ONCE(b->target, local_result); target 964 drivers/misc/vmw_balloon.c int64_t size, target; target 967 drivers/misc/vmw_balloon.c target = READ_ONCE(b->target); target 978 drivers/misc/vmw_balloon.c if (target < size && target != 0 && target 979 drivers/misc/vmw_balloon.c size - target < vmballoon_page_in_frames(VMW_BALLOON_2M_PAGE)) target 983 drivers/misc/vmw_balloon.c if (target > size && time_before(jiffies, READ_ONCE(b->shrink_timeout))) target 986 drivers/misc/vmw_balloon.c return target - size; target 1499 drivers/misc/vmw_balloon.c atomic64_read(&b->size), READ_ONCE(b->target)); target 1678 drivers/misc/vmw_balloon.c seq_printf(f, "%-22s: %16lu\n", "target", READ_ONCE(b->target)); target 5470 drivers/mtd/nand/raw/nand_base.c nand_select_target(chip, pos->target); target 5492 drivers/mtd/nand/raw/nand_base.c nand_select_target(chip, pos->target); target 116 
drivers/mtd/nand/spi/core.c int spinand_select_target(struct spinand_device *spinand, unsigned int target) target 121 drivers/mtd/nand/spi/core.c if (WARN_ON(target >= nand->memorg.ntargets)) target 124 drivers/mtd/nand/spi/core.c if (spinand->cur_target == target) target 128 drivers/mtd/nand/spi/core.c spinand->cur_target = target; target 132 drivers/mtd/nand/spi/core.c ret = spinand->select_target(spinand, target); target 136 drivers/mtd/nand/spi/core.c spinand->cur_target = target; target 144 drivers/mtd/nand/spi/core.c unsigned int target; target 154 drivers/mtd/nand/spi/core.c for (target = 0; target < nand->memorg.ntargets; target++) { target 155 drivers/mtd/nand/spi/core.c ret = spinand_select_target(spinand, target); target 164 drivers/mtd/nand/spi/core.c &spinand->cfg_cache[target]); target 499 drivers/mtd/nand/spi/core.c ret = spinand_select_target(spinand, iter.req.pos.target); target 547 drivers/mtd/nand/spi/core.c ret = spinand_select_target(spinand, iter.req.pos.target); target 580 drivers/mtd/nand/spi/core.c spinand_select_target(spinand, pos->target); target 616 drivers/mtd/nand/spi/core.c ret = spinand_select_target(spinand, pos->target); target 648 drivers/mtd/nand/spi/core.c ret = spinand_select_target(spinand, pos->target); target 64 drivers/mtd/nand/spi/winbond.c unsigned int target) target 73 drivers/mtd/nand/spi/winbond.c *spinand->scratchbuf = target; target 254 drivers/net/bonding/bond_netlink.c __be32 target; target 256 drivers/net/bonding/bond_netlink.c if (nla_len(attr) < sizeof(target)) target 259 drivers/net/bonding/bond_netlink.c target = nla_get_be32(attr); target 261 drivers/net/bonding/bond_netlink.c bond_opt_initval(&newval, (__force u64)target); target 33 drivers/net/bonding/bond_options.c static int bond_option_arp_ip_target_add(struct bonding *bond, __be32 target); target 34 drivers/net/bonding/bond_options.c static int bond_option_arp_ip_target_rem(struct bonding *bond, __be32 target); target 886 drivers/net/bonding/bond_options.c int *target) target 903 drivers/net/bonding/bond_options.c *target = value / bond->params.miimon; target 906 drivers/net/bonding/bond_options.c *target * bond->params.miimon); target 984 drivers/net/bonding/bond_options.c __be32 target, target 994 drivers/net/bonding/bond_options.c targets[slot] = target; target 998 drivers/net/bonding/bond_options.c static int _bond_option_arp_ip_target_add(struct bonding *bond, __be32 target) target 1003 drivers/net/bonding/bond_options.c if (!bond_is_ip_target_ok(target)) { target 1005 drivers/net/bonding/bond_options.c &target); target 1009 drivers/net/bonding/bond_options.c if (bond_get_targets_ip(targets, target) != -1) { /* dup */ target 1011 drivers/net/bonding/bond_options.c &target); target 1021 drivers/net/bonding/bond_options.c netdev_dbg(bond->dev, "Adding ARP target %pI4\n", &target); target 1023 drivers/net/bonding/bond_options.c _bond_options_arp_ip_target_set(bond, ind, target, jiffies); target 1028 drivers/net/bonding/bond_options.c static int bond_option_arp_ip_target_add(struct bonding *bond, __be32 target) target 1030 drivers/net/bonding/bond_options.c return _bond_option_arp_ip_target_add(bond, target); target 1033 drivers/net/bonding/bond_options.c static int bond_option_arp_ip_target_rem(struct bonding *bond, __be32 target) target 1041 drivers/net/bonding/bond_options.c if (!bond_is_ip_target_ok(target)) { target 1043 drivers/net/bonding/bond_options.c &target); target 1047 drivers/net/bonding/bond_options.c ind = bond_get_targets_ip(targets, target); target 1050 
drivers/net/bonding/bond_options.c &target); target 1057 drivers/net/bonding/bond_options.c netdev_dbg(bond->dev, "Removing ARP target %pI4\n", &target); target 1084 drivers/net/bonding/bond_options.c __be32 target; target 1087 drivers/net/bonding/bond_options.c if (!in4_pton(newval->string+1, -1, (u8 *)&target, -1, NULL)) { target 1089 drivers/net/bonding/bond_options.c &target); target 1093 drivers/net/bonding/bond_options.c ret = bond_option_arp_ip_target_add(bond, target); target 1095 drivers/net/bonding/bond_options.c ret = bond_option_arp_ip_target_rem(bond, target); target 1099 drivers/net/bonding/bond_options.c target = newval->value; target 1100 drivers/net/bonding/bond_options.c ret = bond_option_arp_ip_target_add(bond, target); target 389 drivers/net/can/softing/softing_fw.c ktime_t target; target 398 drivers/net/can/softing/softing_fw.c target = ktime_add_us(card->ts_ref, rawl); target 400 drivers/net/can/softing/softing_fw.c tmp = ktime_add(target, card->ts_overflow); target 403 drivers/net/can/softing/softing_fw.c target = tmp; target 404 drivers/net/can/softing/softing_fw.c tmp = ktime_add(target, card->ts_overflow); target 406 drivers/net/can/softing/softing_fw.c return ktime_add(target, real_offset); target 1138 drivers/net/dsa/mv88e6xxx/chip.c int target, port; target 1145 drivers/net/dsa/mv88e6xxx/chip.c for (target = 0; target < 32; target++) { target 1147 drivers/net/dsa/mv88e6xxx/chip.c if (target < DSA_MAX_SWITCHES) target 1148 drivers/net/dsa/mv88e6xxx/chip.c if (chip->ds->rtable[target] != DSA_RTABLE_NONE) target 1149 drivers/net/dsa/mv88e6xxx/chip.c port = chip->ds->rtable[target]; target 1151 drivers/net/dsa/mv88e6xxx/chip.c err = mv88e6xxx_g2_device_mapping_write(chip, target, port); target 115 drivers/net/dsa/mv88e6xxx/global2.c int mv88e6xxx_g2_device_mapping_write(struct mv88e6xxx_chip *chip, int target, target 118 drivers/net/dsa/mv88e6xxx/global2.c u16 val = (target << 8) | (port & 0x1f); target 341 drivers/net/dsa/mv88e6xxx/global2.h int mv88e6xxx_g2_device_mapping_write(struct mv88e6xxx_chip *chip, int target, target 513 drivers/net/dsa/mv88e6xxx/global2.h int target, int port) target 648 drivers/net/ethernet/8390/mac8390.c unsigned char *target = nubus_slot_addr(IRQ2SLOT(dev->irq)); target 654 drivers/net/ethernet/8390/mac8390.c target[0xC0000] = 0; target 665 drivers/net/ethernet/8390/mac8390.c unsigned char *target = to; target 670 drivers/net/ethernet/8390/mac8390.c *target++ = ptr[-1]; target 675 drivers/net/ethernet/8390/mac8390.c *(unsigned short *)target = *(unsigned short volatile *)ptr; target 677 drivers/net/ethernet/8390/mac8390.c target += 2; target 682 drivers/net/ethernet/8390/mac8390.c *target = *ptr; target 295 drivers/net/ethernet/amd/xgbe/xgbe-i2c.c state->tx_abort_source, state->op->target); target 392 drivers/net/ethernet/amd/xgbe/xgbe-i2c.c xgbe_i2c_set_target(pdata, op->target); target 423 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = phy_data->redrv_addr; target 437 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = phy_data->redrv_addr; target 457 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c static int xgbe_phy_i2c_write(struct xgbe_prv_data *pdata, unsigned int target, target 467 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = target; target 477 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c static int xgbe_phy_i2c_read(struct xgbe_prv_data *pdata, unsigned int target, target 488 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = target; target 503 
drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = target; target 525 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = phy_data->sfp_mux_address; target 544 drivers/net/ethernet/amd/xgbe/xgbe-phy-v2.c i2c_op.target = phy_data->sfp_mux_address; target 638 drivers/net/ethernet/amd/xgbe/xgbe.h unsigned int target; target 13465 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c u32 *target = (u32 *)_target; target 13469 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i] = be32_to_cpu(source[i]); target 13479 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c struct raw_op *target = (struct raw_op *)_target; target 13484 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].op = (tmp >> 24) & 0xff; target 13485 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].offset = tmp & 0xffffff; target 13486 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].raw_data = be32_to_cpu(source[j + 1]); target 13496 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c struct iro *target = (struct iro *)_target; target 13500 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].base = be32_to_cpu(source[j]); target 13503 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].m1 = (tmp >> 16) & 0xffff; target 13504 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].m2 = tmp & 0xffff; target 13507 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].m3 = (tmp >> 16) & 0xffff; target 13508 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i].size = tmp & 0xffff; target 13516 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c u16 *target = (u16 *)_target; target 13520 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c target[i] = be16_to_cpu(source[i]); target 981 drivers/net/ethernet/marvell/mvneta.c u8 target, u8 attr) target 1009 drivers/net/ethernet/marvell/mvneta.c (attr << 8) | target); target 1026 drivers/net/ethernet/marvell/mvneta.c u8 target, attr; target 1031 drivers/net/ethernet/marvell/mvneta.c &target, &attr); target 1039 drivers/net/ethernet/marvell/mvneta.c target, attr); target 295 drivers/net/ethernet/mscc/ocelot_board.c struct regmap *target; target 297 drivers/net/ethernet/mscc/ocelot_board.c target = ocelot_io_platform_init(ocelot, pdev, res[i].name); target 298 drivers/net/ethernet/mscc/ocelot_board.c if (IS_ERR(target)) { target 304 drivers/net/ethernet/mscc/ocelot_board.c return PTR_ERR(target); target 307 drivers/net/ethernet/mscc/ocelot_board.c ocelot->targets[res[i].id] = target; target 15 drivers/net/ethernet/mscc/ocelot_io.c u16 target = reg >> TARGET_OFFSET; target 18 drivers/net/ethernet/mscc/ocelot_io.c WARN_ON(!target); target 20 drivers/net/ethernet/mscc/ocelot_io.c regmap_read(ocelot->targets[target], target 21 drivers/net/ethernet/mscc/ocelot_io.c ocelot->map[target][reg & REG_MASK] + offset, &val); target 28 drivers/net/ethernet/mscc/ocelot_io.c u16 target = reg >> TARGET_OFFSET; target 30 drivers/net/ethernet/mscc/ocelot_io.c WARN_ON(!target); target 32 drivers/net/ethernet/mscc/ocelot_io.c regmap_write(ocelot->targets[target], target 33 drivers/net/ethernet/mscc/ocelot_io.c ocelot->map[target][reg & REG_MASK] + offset, val); target 40 drivers/net/ethernet/mscc/ocelot_io.c u16 target = reg >> TARGET_OFFSET; target 42 drivers/net/ethernet/mscc/ocelot_io.c WARN_ON(!target); target 44 drivers/net/ethernet/mscc/ocelot_io.c regmap_update_bits(ocelot->targets[target], target 45 drivers/net/ethernet/mscc/ocelot_io.c ocelot->map[target][reg & REG_MASK] + offset, target 66 drivers/net/ethernet/mscc/ocelot_io.c u16 target; 
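
The ocelot_io.c entries that follow read a target-block index out of the high bits of a register constant (reg >> TARGET_OFFSET) and an in-block offset out of the low bits (reg & REG_MASK). The snippet below is a standalone sketch of that encoding scheme only; the bit width, table contents and resolve_addr helper are illustrative assumptions, not the driver's real values or API.

#include <stdint.h>
#include <stdio.h>

#define TARGET_OFFSET	24U
#define REG_MASK	((1U << TARGET_OFFSET) - 1)

/* hypothetical per-target base addresses, indexed by target id */
static const uint32_t target_base[] = { 0x0, 0x100000, 0x200000 };

static uint32_t resolve_addr(uint32_t reg, uint32_t offset)
{
	uint32_t target = reg >> TARGET_OFFSET;	/* which register block */
	uint32_t within = reg & REG_MASK;	/* offset inside block  */

	return target_base[target] + within + offset;
}

int main(void)
{
	uint32_t reg = (2U << TARGET_OFFSET) | 0x44;	/* target 2, reg 0x44 */

	printf("0x%08x\n", resolve_addr(reg, 0));	/* prints 0x00200044 */
	return 0;
}
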
target 75 drivers/net/ethernet/mscc/ocelot_io.c target = regfields[i].reg >> TARGET_OFFSET; target 77 drivers/net/ethernet/mscc/ocelot_io.c regfield.reg = ocelot->map[target][reg & REG_MASK]; target 83 drivers/net/ethernet/mscc/ocelot_io.c ocelot->targets[target], target 726 drivers/net/ethernet/natsemi/natsemi.c int target = 31; target 738 drivers/net/ethernet/natsemi/natsemi.c if (target == addr) target 739 drivers/net/ethernet/natsemi/natsemi.c target--; target 740 drivers/net/ethernet/natsemi/natsemi.c if (target == np->phy_addr_external) target 741 drivers/net/ethernet/natsemi/natsemi.c target--; target 742 drivers/net/ethernet/natsemi/natsemi.c writew(target, ioaddr + PhyCtrl); target 44 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c u8 target; target 151 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c return NFP_CPP_ISLAND_ID(cpp_id->target, cpp_id->action, cpp_id->token, target 444 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c return cpp_id->target == NFP_CPP_TARGET_ISLAND_XPB && target 513 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c cpp_id = NFP_CPP_ISLAND_ID(cpp_params.target, target 635 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c cpp_params.target = sym->target; target 735 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c u32 target; target 755 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c u32 target = NFP_CPP_ID_TARGET_of(dest); target 760 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c pp = nfp_target_pushpull(NFP_CPP_ID(target, action, token), address); target 780 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c priv->target = target; target 823 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c barnum = nfp_alloc_bar(nfp, priv->target, priv->action, priv->token, target 838 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c priv->bar, priv->target); target 945 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c if (priv->target == (NFP_CPP_TARGET_MU & NFP_CPP_TARGET_ID_MASK) && target 953 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c NFP_CPP_ID(priv->target, target 1008 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c if (priv->target == (NFP_CPP_TARGET_ID_MASK & NFP_CPP_TARGET_MU) && target 1016 drivers/net/ethernet/netronome/nfp/nfpcore/nfp6000_pcie.c NFP_CPP_ID(priv->target, target 165 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_arm.h #define NFP_ARM_GCSR_EXPL2_CSR(target, action, length, \ target 167 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_arm.h (NFP_ARM_GCSR_EXPL2_BAR_TGT(target) | \ target 191 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_arm.h #define NFP_ARM_GCSR_EXPA_CSR(mode, target, token, is_64, action, address) \ target 194 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_arm.h NFP_ARM_GCSR_EXPA_BAR_TGT(target) | \ target 201 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_arm.h #define NFP_ARM_GCSR_BULK_CSR(mode, target, token, is_64, address) \ target 204 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_arm.h NFP_ARM_GCSR_BULK_BAR_TGT(target) | \ target 67 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h #define NFP_CPP_ATOMIC_RD(target, island) \ target 68 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h NFP_CPP_ISLAND_ID((target), 3, 0, (island)) target 69 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h #define NFP_CPP_ATOMIC_WR(target, island) \ target 70 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h NFP_CPP_ISLAND_ID((target), 4, 0, (island)) target 85 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h 
#define NFP_CPP_ID(target, action, token) \ target 86 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h ((((target) & 0x7f) << 24) | (((token) & 0xff) << 16) | \ target 103 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h #define NFP_CPP_ISLAND_ID(target, action, token, island) \ target 104 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h ((((target) & 0x7f) << 24) | (((token) & 0xff) << 16) | \ target 272 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h int nfp_cpp_mutex_init(struct nfp_cpp *cpp, int target, target 274 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h struct nfp_cpp_mutex *nfp_cpp_mutex_alloc(struct nfp_cpp *cpp, int target, target 281 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_cpp.h int nfp_cpp_mutex_reclaim(struct nfp_cpp *cpp, int target, target 16 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c int target; target 51 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c nfp_cpp_mutex_validate(u16 interface, int *target, unsigned long long address) target 62 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c if (*target != NFP_CPP_TARGET_MU) target 87 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c int target, unsigned long long address, u32 key) target 89 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 muw = NFP_CPP_ID(target, 4, 0); /* atomic_write */ target 93 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c err = nfp_cpp_mutex_validate(interface, &target, address); target 123 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c struct nfp_cpp_mutex *nfp_cpp_mutex_alloc(struct nfp_cpp *cpp, int target, target 126 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 mur = NFP_CPP_ID(target, 3, 0); /* atomic_read */ target 132 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c err = nfp_cpp_mutex_validate(interface, &target, address); target 148 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c mutex->target = target; target 200 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c mutex->target, mutex->address, mutex->key); target 219 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 muw = NFP_CPP_ID(mutex->target, 4, 0); /* atomic_write */ target 220 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 mur = NFP_CPP_ID(mutex->target, 3, 0); /* atomic_read */ target 264 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 muw = NFP_CPP_ID(mutex->target, 4, 0); /* atomic_write */ target 265 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 mus = NFP_CPP_ID(mutex->target, 5, 3); /* test_set_imm */ target 266 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 mur = NFP_CPP_ID(mutex->target, 3, 0); /* atomic_read */ target 341 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c int nfp_cpp_mutex_reclaim(struct nfp_cpp *cpp, int target, target 344 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 mur = NFP_CPP_ID(target, 3, 0); /* atomic_read */ target 345 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c const u32 muw = NFP_CPP_ID(target, 4, 0); /* atomic_write */ target 350 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_mutex.c err = nfp_cpp_mutex_validate(interface, &target, address); target 59 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nffw.h int target; target 30 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c u8 target; target 62 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c switch (fw->target) { target 64 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c sw->target 
= NFP_RTSYM_TARGET_LMEM; target 67 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c sw->target = NFP_RTSYM_TARGET_EMU_CACHE; target 70 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c sw->target = fw->target; target 237 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c if (sym->target == NFP_RTSYM_TARGET_EMU_CACHE) { target 245 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c } else if (sym->target < 0) { target 247 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c sym->name, sym->target); target 250 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_rtsym.c *cpp_id = NFP_CPP_ISLAND_ID(sym->target, action, token, target 706 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_target.c const int target = NFP_CPP_ID_TARGET_of(cpp_island_id); target 710 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_target.c if (target < 0 || target >= 16) { target 711 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_target.c pr_err("Invalid CPP target: %d\n", target); target 726 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_target.c imb = imb_table[target]; target 729 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_target.c err = nfp_cppat_addr_encode(cpp_target_address, island, target, target 737 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_target.c *cpp_target_id = NFP_CPP_ID(target, target 968 drivers/net/ethernet/nvidia/forcedeth.c static int reg_delay(struct net_device *dev, int offset, u32 mask, u32 target, target 979 drivers/net/ethernet/nvidia/forcedeth.c } while ((readl(base + offset) & mask) != target); target 2314 drivers/net/ethernet/qlogic/qed/qed_hsi.h u8 target; target 971 drivers/net/ethernet/renesas/ravb.h u32 target; target 250 drivers/net/ethernet/renesas/ravb_ptp.c perout->target = (u32)start_ns; target 322 drivers/net/ethernet/renesas/ravb_ptp.c perout->target += perout->period; target 323 drivers/net/ethernet/renesas/ravb_ptp.c ravb_ptp_update_compare(priv, perout->target); target 179 drivers/net/ethernet/sun/ldmvsw.c u64 target = mdesc_arc_target(hp, a); target 182 drivers/net/ethernet/sun/ldmvsw.c name = mdesc_get_property(hp, target, "name", NULL); target 186 drivers/net/ethernet/sun/ldmvsw.c local_mac = mdesc_get_property(hp, target, target 188 drivers/net/ethernet/sun/ldmvsw.c cfghandle = mdesc_get_property(hp, target, target 391 drivers/net/ethernet/sun/sunvnet.c u64 target = mdesc_arc_target(hp, a); target 394 drivers/net/ethernet/sun/sunvnet.c name = mdesc_get_property(hp, target, "name", NULL); target 398 drivers/net/ethernet/sun/sunvnet.c local_mac = mdesc_get_property(hp, target, target 1499 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c struct gelic_wl_scan_info *target, *tmp; target 1542 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c list_for_each_entry_safe(target, tmp, &wl->network_list, list) { target 1543 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->valid = 0; target 1545 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c if (time_before(target->last_scanned + wl->scan_age, target 1547 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c kfree(target->hwinfo); target 1548 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->hwinfo = NULL; target 1549 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c list_move_tail(&target->list, &wl->network_free_list); target 1574 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c list_for_each_entry(target, &wl->network_list, list) { target 1575 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c if (ether_addr_equal(&target->hwinfo->bssid[2], target 1583 
drivers/net/ethernet/toshiba/ps3_gelic_wireless.c (target->last_scanned < oldest->last_scanned)) target 1584 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c oldest = target; target 1591 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target = oldest; target 1593 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target = list_entry(wl->network_free_list.next, target 1600 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->last_scanned = this_time; target 1601 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->valid = 1; target 1602 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->eurus_index = i; target 1603 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c kfree(target->hwinfo); target 1604 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->hwinfo = kmemdup(scan_info, target 1607 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c if (!target->hwinfo) target 1611 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->essid_len = strnlen(scan_info->essid, target 1613 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->rate_len = 0; target 1616 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->rate_len++; target 1617 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c if (8 < target->rate_len) target 1619 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->rate_len); target 1620 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->rate_ext_len = 0; target 1623 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c target->rate_ext_len++; target 1624 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c list_move_tail(&target->list, &wl->network_list); target 2457 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c struct gelic_wl_scan_info *target; target 2461 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c list_for_each_entry_safe(target, tmp, &wl->network_list, list) { target 2462 drivers/net/ethernet/toshiba/ps3_gelic_wireless.c list_move_tail(&target->list, &wl->network_free_list); target 379 drivers/net/ipvlan/ipvlan_core.c i6addr = &ndmh->target; target 1848 drivers/net/phy/mscc.c u32 target, u32 reg) target 1864 drivers/net/phy/mscc.c MSCC_PHY_CSR_CNTL_20_TARGET(target >> 2)); target 1870 drivers/net/phy/mscc.c MSCC_PHY_CSR_CNTL_19_TARGET(target & 0x3)); target 1896 drivers/net/phy/mscc.c u32 target, u32 reg, u32 val) target 1911 drivers/net/phy/mscc.c MSCC_PHY_CSR_CNTL_20_TARGET(target >> 2)); target 1923 drivers/net/phy/mscc.c MSCC_PHY_CSR_CNTL_19_TARGET(target & 0x3)); target 344 drivers/net/usb/cdc_mbim.c ipv6_stub->ndisc_send_na(netdev, &iph->saddr, &msg->target, target 1967 drivers/net/vxlan.c na->target = ns->target; target 2003 drivers/net/vxlan.c ipv6_addr_is_multicast(&msg->target)) target 2006 drivers/net/vxlan.c n = neigh_lookup(ipv6_stub->nd_tbl, &msg->target, dev); target 2037 drivers/net/vxlan.c .sin6.sin6_addr = msg->target, target 2418 drivers/net/wireless/ath/ath5k/phy.c ath5k_get_interpolated_value(s16 target, s16 x_left, s16 x_right, target 2437 drivers/net/wireless/ath/ath5k/phy.c result = y_left + (ratio * (target - x_left) / 100); target 2589 drivers/net/wireless/ath/ath5k/phy.c u32 target = channel->center_freq; target 2614 drivers/net/wireless/ath/ath5k/phy.c if (target < pcinfo[0].freq) { target 2622 drivers/net/wireless/ath/ath5k/phy.c if (target > pcinfo[max].freq) { target 2636 drivers/net/wireless/ath/ath5k/phy.c if (pcinfo[i].freq == target) { target 2644 drivers/net/wireless/ath/ath5k/phy.c if (target < pcinfo[i].freq) { target 2677 drivers/net/wireless/ath/ath5k/phy.c u32 target = channel->center_freq; 
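The ath5k entries above call ath5k_get_interpolated_value() to interpolate calibration data toward the channel's center frequency. A minimal standalone sketch of that kind of linear interpolation follows; the scaled-by-100 ratio is an assumption inferred from the visible "/ 100" in the result line, and the function name interpolate_calib() is illustrative, not the driver's.

/*
 * Illustrative sketch only, not the driver's exact code: linear
 * interpolation between calibration points (x_left, y_left) and
 * (x_right, y_right), evaluated at "target", in the style of the
 * ath5k_get_interpolated_value() calls listed above. The ratio is
 * scaled by 100 (assumed) so integer division keeps some precision.
 */
static short interpolate_calib(short target, short x_left, short x_right,
			       short y_left, short y_right)
{
	short ratio;

	if (x_right == x_left)		/* degenerate interval */
		return y_left;

	ratio = ((y_right - y_left) * 100) / (x_right - x_left);

	return y_left + (ratio * (target - x_left) / 100);
}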
target 2701 drivers/net/wireless/ath/ath5k/phy.c if (target < rpinfo[0].freq) { target 2706 drivers/net/wireless/ath/ath5k/phy.c if (target > rpinfo[max].freq) { target 2713 drivers/net/wireless/ath/ath5k/phy.c if (rpinfo[i].freq == target) { target 2718 drivers/net/wireless/ath/ath5k/phy.c if (target < rpinfo[i].freq) { target 2727 drivers/net/wireless/ath/ath5k/phy.c rates->freq = target; target 2730 drivers/net/wireless/ath/ath5k/phy.c ath5k_get_interpolated_value(target, rpinfo[idx_l].freq, target 2736 drivers/net/wireless/ath/ath5k/phy.c ath5k_get_interpolated_value(target, rpinfo[idx_l].freq, target 2742 drivers/net/wireless/ath/ath5k/phy.c ath5k_get_interpolated_value(target, rpinfo[idx_l].freq, target 2748 drivers/net/wireless/ath/ath5k/phy.c ath5k_get_interpolated_value(target, rpinfo[idx_l].freq, target 2776 drivers/net/wireless/ath/ath5k/phy.c u32 target = channel->center_freq; target 2825 drivers/net/wireless/ath/ath5k/phy.c if (target <= rep[rep_idx].freq) target 3283 drivers/net/wireless/ath/ath5k/phy.c u32 target = channel->center_freq; target 3389 drivers/net/wireless/ath/ath5k/phy.c tmpL[i] = (u8) ath5k_get_interpolated_value(target, target 3408 drivers/net/wireless/ath/ath5k/phy.c ah->ah_txpower.txp_min_pwr = ath5k_get_interpolated_value(target, target 3413 drivers/net/wireless/ath/ath5k/phy.c ah->ah_txpower.txp_max_pwr = ath5k_get_interpolated_value(target, target 900 drivers/net/wireless/ath/ath6kl/core.h void ath6kl_rx(struct htc_target *target, struct htc_packet *packet); target 903 drivers/net/wireless/ath/ath6kl/core.h enum htc_send_full_action ath6kl_tx_queue_full(struct htc_target *target, target 922 drivers/net/wireless/ath/ath6kl/core.h void ath6kl_rx_refill(struct htc_target *target, target 925 drivers/net/wireless/ath/ath6kl/core.h struct htc_packet *ath6kl_alloc_amsdu_rxbuf(struct htc_target *target, target 278 drivers/net/wireless/ath/ath6kl/debug.c void dump_cred_dist_stats(struct htc_target *target) target 282 drivers/net/wireless/ath/ath6kl/debug.c list_for_each_entry(ep_list, &target->cred_dist_list, list) target 287 drivers/net/wireless/ath/ath6kl/debug.c target->credit_info->total_avail_credits, target 288 drivers/net/wireless/ath/ath6kl/debug.c target->credit_info->cur_free_credits); target 708 drivers/net/wireless/ath/ath6kl/debug.c struct htc_target *target = ar->htc_target; target 715 drivers/net/wireless/ath/ath6kl/debug.c get_queue_depth(&target->cred_dist_list) * CREDIT_INFO_LEN; target 722 drivers/net/wireless/ath/ath6kl/debug.c target->credit_info->total_avail_credits); target 725 drivers/net/wireless/ath/ath6kl/debug.c target->credit_info->cur_free_credits); target 732 drivers/net/wireless/ath/ath6kl/debug.c list_for_each_entry(ep_list, &target->cred_dist_list, list) { target 762 drivers/net/wireless/ath/ath6kl/debug.c static unsigned int print_endpoint_stat(struct htc_target *target, char *buf, target 772 drivers/net/wireless/ath/ath6kl/debug.c ep_st = &target->endpoint[i].ep_st; target 786 drivers/net/wireless/ath/ath6kl/debug.c struct htc_target *target = ar->htc_target; target 799 drivers/net/wireless/ath/ath6kl/debug.c len = print_endpoint_stat(target, buf, buf_len, len, \ target 841 drivers/net/wireless/ath/ath6kl/debug.c struct htc_target *target = ar->htc_target; target 851 drivers/net/wireless/ath/ath6kl/debug.c ep_st = &target->endpoint[i].ep_st; target 75 drivers/net/wireless/ath/ath6kl/debug.h void dump_cred_dist_stats(struct htc_target *target); target 105 drivers/net/wireless/ath/ath6kl/debug.h static inline void 
dump_cred_dist_stats(struct htc_target *target) target 28 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_wait_target(struct htc_target *target) target 30 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->wait_target(target); target 33 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_start(struct htc_target *target) target 35 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->start(target); target 38 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_conn_service(struct htc_target *target, target 42 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->conn_service(target, req, resp); target 45 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_tx(struct htc_target *target, target 48 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->tx(target, packet); target 51 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline void ath6kl_htc_stop(struct htc_target *target) target 53 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->stop(target); target 56 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline void ath6kl_htc_cleanup(struct htc_target *target) target 58 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->cleanup(target); target 61 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline void ath6kl_htc_flush_txep(struct htc_target *target, target 65 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->flush_txep(target, endpoint, tag); target 68 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline void ath6kl_htc_flush_rx_buf(struct htc_target *target) target 70 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->flush_rx_buf(target); target 73 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline void ath6kl_htc_activity_changed(struct htc_target *target, target 77 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->activity_changed(target, endpoint, target 81 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_get_rxbuf_num(struct htc_target *target, target 84 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->get_rxbuf_num(target, endpoint); target 87 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_add_rxbuf_multiple(struct htc_target *target, target 90 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->add_rxbuf_multiple(target, pktq); target 93 drivers/net/wireless/ath/ath6kl/htc-ops.h static inline int ath6kl_htc_credit_setup(struct htc_target *target, target 96 drivers/net/wireless/ath/ath6kl/htc-ops.h return target->dev->ar->htc_ops->credit_setup(target, info); target 519 drivers/net/wireless/ath/ath6kl/htc.h struct htc_target *target; target 550 drivers/net/wireless/ath/ath6kl/htc.h int (*wait_target)(struct htc_target *target); target 551 drivers/net/wireless/ath/ath6kl/htc.h int (*start)(struct htc_target *target); target 552 drivers/net/wireless/ath/ath6kl/htc.h int (*conn_service)(struct htc_target *target, target 555 drivers/net/wireless/ath/ath6kl/htc.h int (*tx)(struct htc_target *target, struct htc_packet *packet); target 556 drivers/net/wireless/ath/ath6kl/htc.h void (*stop)(struct htc_target *target); target 557 drivers/net/wireless/ath/ath6kl/htc.h void (*cleanup)(struct htc_target *target); target 558 drivers/net/wireless/ath/ath6kl/htc.h void (*flush_txep)(struct htc_target 
*target, target 560 drivers/net/wireless/ath/ath6kl/htc.h void (*flush_rx_buf)(struct htc_target *target); target 561 drivers/net/wireless/ath/ath6kl/htc.h void (*activity_changed)(struct htc_target *target, target 564 drivers/net/wireless/ath/ath6kl/htc.h int (*get_rxbuf_num)(struct htc_target *target, target 566 drivers/net/wireless/ath/ath6kl/htc.h int (*add_rxbuf_multiple)(struct htc_target *target, target 568 drivers/net/wireless/ath/ath6kl/htc.h int (*credit_setup)(struct htc_target *target, target 632 drivers/net/wireless/ath/ath6kl/htc.h int ath6kl_htc_rxmsg_pending_handler(struct htc_target *target, target 28 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_cleanup(struct htc_target *target); target 29 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_stop(struct htc_target *target); target 30 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_add_rxbuf_multiple(struct htc_target *target, target 32 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_set_credit_dist(struct htc_target *target, target 375 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_reclaim_txctrl_buf(struct htc_target *target, target 378 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->htc_lock); target 379 drivers/net/wireless/ath/ath6kl/htc_mbox.c list_add_tail(&pkt->list, &target->free_ctrl_txbuf); target 380 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->htc_lock); target 383 drivers/net/wireless/ath/ath6kl/htc_mbox.c static struct htc_packet *htc_get_control_buf(struct htc_target *target, target 389 drivers/net/wireless/ath/ath6kl/htc_mbox.c buf_list = tx ? &target->free_ctrl_txbuf : &target->free_ctrl_rxbuf; target 391 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->htc_lock); target 394 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->htc_lock); target 400 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->htc_lock); target 408 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_tx_comp_update(struct htc_target *target, target 423 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 429 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->credit_info, &target->cred_dist_list); target 431 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_credit_distribute(target->credit_info, target 432 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->cred_dist_list, target 435 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 448 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_tx_complete(endpoint->target, txq); target 451 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_tx_comp_handler(struct htc_target *target, target 454 drivers/net/wireless/ath/ath6kl/htc_mbox.c struct htc_endpoint *endpoint = &target->endpoint[packet->endpoint]; target 460 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_tx_comp_update(target, endpoint, packet); target 467 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_async_tx_scat_complete(struct htc_target *target, target 485 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[packet->endpoint]; target 496 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_tx_comp_update(target, endpoint, packet); target 501 drivers/net/wireless/ath/ath6kl/htc_mbox.c hif_scatter_req_add(target->dev->ar, scat_req); target 507 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_tx_issue(struct htc_target *target, target 519 
drivers/net/wireless/ath/ath6kl/htc_mbox.c padded_len = CALC_TXRX_PADDED_LEN(target, send_len); target 524 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->dev->ar->mbox_info.htc_addr, target 528 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = hif_read_write_sync(target->dev->ar, target 529 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->dev->ar->mbox_info.htc_addr, target 536 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = hif_write_async(target->dev->ar, target 537 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->dev->ar->mbox_info.htc_addr, target 546 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int htc_check_credits(struct htc_target *target, target 551 drivers/net/wireless/ath/ath6kl/htc_mbox.c *req_cred = (len > target->tgt_cred_sz) ? target 552 drivers/net/wireless/ath/ath6kl/htc_mbox.c DIV_ROUND_UP(len, target->tgt_cred_sz) : 1; target 564 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_credit_seek(target->credit_info, &ep->cred_dist); target 584 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_credit_seek(target->credit_info, &ep->cred_dist); target 599 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_tx_pkts_get(struct htc_target *target, target 620 drivers/net/wireless/ath/ath6kl/htc_mbox.c len = CALC_TXRX_PADDED_LEN(target, target 623 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (htc_check_credits(target, endpoint, &flags, target 637 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->context = target; target 679 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_tx_setup_scat_list(struct htc_target *target, target 690 drivers/net/wireless/ath/ath6kl/htc_mbox.c rem_scat = target->max_tx_bndl_sz; target 699 drivers/net/wireless/ath/ath6kl/htc_mbox.c len = CALC_TXRX_PADDED_LEN(target, target 702 drivers/net/wireless/ath/ath6kl/htc_mbox.c cred_pad = htc_get_credit_padding(target->tgt_cred_sz, target 759 drivers/net/wireless/ath/ath6kl/htc_mbox.c struct htc_target *target = endpoint->target; target 769 drivers/net/wireless/ath/ath6kl/htc_mbox.c ac = target->dev->ar->ep2ac_map[endpoint->eid]; target 774 drivers/net/wireless/ath/ath6kl/htc_mbox.c n_scat = min(n_scat, target->msg_per_bndl_max); target 780 drivers/net/wireless/ath/ath6kl/htc_mbox.c scat_req = hif_scatter_req_get(target->dev->ar); target 810 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tx_bndl_mask &= ~txb_mask; target 812 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tx_bndl_mask |= txb_mask; target 821 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_tx_setup_scat_list(target, endpoint, target 825 drivers/net/wireless/ath/ath6kl/htc_mbox.c hif_scatter_req_add(target->dev->ar, scat_req); target 844 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_submit_scat_req(target->dev, scat_req, false); target 858 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_tx_from_queue(struct htc_target *target, target 868 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 873 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 886 drivers/net/wireless/ath/ath6kl/htc_mbox.c ac = target->dev->ar->ep2ac_map[endpoint->eid]; target 892 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_tx_pkts_get(target, endpoint, &txq); target 897 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 904 drivers/net/wireless/ath/ath6kl/htc_mbox.c if ((target->tx_bndl_mask) && target 910 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->tx_bndl_mask & (1 << 
ac)) { target 927 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_tx_issue(target, packet); target 935 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 946 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (!(target->tx_bndl_mask & (1 << ac)) && target 948 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (++target->ac_tx_count[ac] >= target 950 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->ac_tx_count[ac] = 0; target 951 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tx_bndl_mask |= (1 << ac); target 957 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->ac_tx_count[ac] = 0; target 962 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 965 drivers/net/wireless/ath/ath6kl/htc_mbox.c static bool ath6kl_htc_tx_try(struct htc_target *target, target 975 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 977 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 989 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (ep_cb.tx_full(endpoint->target, tx_pkt) == target 996 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 998 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1000 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_tx_from_queue(target, endpoint); target 1005 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_chk_ep_txq(struct htc_target *target) target 1016 drivers/net/wireless/ath/ath6kl/htc_mbox.c list_for_each_entry(cred_dist, &target->cred_dist_list, list) { target 1019 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 1026 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1034 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_tx_from_queue(target, endpoint); target 1035 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 1037 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1041 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int htc_setup_tx_complete(struct htc_target *target) target 1046 drivers/net/wireless/ath/ath6kl/htc_mbox.c send_pkt = htc_get_control_buf(target, true); target 1051 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->htc_tgt_ver >= HTC_VERSION_2P1) { target 1061 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->msg_per_bndl_max > 0) { target 1065 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max; target 1087 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_tx_issue(target, send_pkt); target 1088 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_txctrl_buf(target, send_pkt); target 1093 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_set_credit_dist(struct htc_target *target, target 1100 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->credit_info = credit_info; target 1102 drivers/net/wireless/ath/ath6kl/htc_mbox.c list_add_tail(&target->endpoint[ENDPOINT_0].cred_dist.list, target 1103 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->cred_dist_list); target 1107 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[ep]; target 1110 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->cred_dist_list); target 1121 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_tx(struct htc_target *target, target 1136 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[packet->endpoint]; target 1138 
drivers/net/wireless/ath/ath6kl/htc_mbox.c if (!ath6kl_htc_tx_try(target, endpoint, packet)) { target 1139 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->status = (target->htc_flags & HTC_OP_STATE_STOPPING) ? target 1150 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_flush_txep(struct htc_target *target, target 1155 drivers/net/wireless/ath/ath6kl/htc_mbox.c struct htc_endpoint *endpoint = &target->endpoint[eid]; target 1165 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 1173 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1189 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_flush_txep_all(struct htc_target *target) target 1194 drivers/net/wireless/ath/ath6kl/htc_mbox.c dump_cred_dist_stats(target); target 1197 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[i]; target 1201 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_mbox_flush_txep(target, i, HTC_TX_PACKET_TAG_ALL); target 1205 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_activity_changed(struct htc_target *target, target 1209 drivers/net/wireless/ath/ath6kl/htc_mbox.c struct htc_endpoint *endpoint = &target->endpoint[eid]; target 1217 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 1237 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->credit_info, &target->cred_dist_list); target 1239 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_credit_distribute(target->credit_info, target 1240 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->cred_dist_list, target 1244 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1247 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_chk_ep_txq(target); target 1262 drivers/net/wireless/ath/ath6kl/htc_mbox.c static inline bool htc_valid_rx_frame_len(struct htc_target *target, target 1265 drivers/net/wireless/ath/ath6kl/htc_mbox.c return (eid == target->dev->ar->ctrl_ep) ? 
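The htc_check_credits() entry above charges one transmit credit per tgt_cred_sz bytes of a frame, with a minimum of one credit. A hedged, self-contained restatement of that arithmetic is below (DIV_ROUND_UP open-coded so the example stands alone; the helper name is illustrative).

#include <stddef.h>

/* Sketch of the credit accounting visible in the htc_check_credits()
 * entry above: frames no longer than one credit cost one credit,
 * longer frames cost ceil(len / tgt_cred_sz) credits. */
static int credits_required(size_t len, size_t tgt_cred_sz)
{
	if (len <= tgt_cred_sz)
		return 1;

	return (int)((len + tgt_cred_sz - 1) / tgt_cred_sz);
}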
target 1269 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int htc_add_rxbuf(struct htc_target *target, struct htc_packet *packet) target 1275 drivers/net/wireless/ath/ath6kl/htc_mbox.c return ath6kl_htc_mbox_add_rxbuf_multiple(target, &queue); target 1278 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_reclaim_rxbuf(struct htc_target *target, target 1285 drivers/net/wireless/ath/ath6kl/htc_mbox.c ep->ep_cb.rx(ep->target, packet); target 1288 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_add_rxbuf((void *)(target), packet); target 1292 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void reclaim_rx_ctrl_buf(struct htc_target *target, target 1295 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->htc_lock); target 1296 drivers/net/wireless/ath/ath6kl/htc_mbox.c list_add_tail(&packet->list, &target->free_ctrl_rxbuf); target 1297 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->htc_lock); target 1300 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_packet(struct htc_target *target, target 1304 drivers/net/wireless/ath/ath6kl/htc_mbox.c struct ath6kl_device *dev = target->dev; target 1308 drivers/net/wireless/ath/ath6kl/htc_mbox.c padded_len = CALC_TXRX_PADDED_LEN(target, rx_len); target 1354 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&endpoint->target->rx_lock); target 1357 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&endpoint->target->rx_lock); target 1358 drivers/net/wireless/ath/ath6kl/htc_mbox.c ep_cb.rx_refill(endpoint->target, endpoint->eid); target 1361 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&endpoint->target->rx_lock); target 1366 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_setup(struct htc_target *target, target 1377 drivers/net/wireless/ath/ath6kl/htc_mbox.c full_len = CALC_TXRX_PADDED_LEN(target, target 1381 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (!htc_valid_rx_frame_len(target, ep->eid, full_len)) { target 1403 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 1406 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet = ep_cb.rx_allocthresh(ep->target, ep->eid, target 1408 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); target 1413 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 1414 drivers/net/wireless/ath/ath6kl/htc_mbox.c ep_cb.rx_refill(ep->target, ep->eid); target 1415 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); target 1429 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->rx_st_flags |= HTC_RECV_WAIT_BUFFERS; target 1430 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->ep_waiting = ep->eid; target 1450 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->htc_flags & HTC_OP_STATE_STOPPING) { target 1469 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_alloc(struct htc_target *target, target 1479 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); target 1523 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (n_msg > target->msg_per_bndl_max) { target 1537 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_setup(target, endpoint, &lk_ahds[i], target 1546 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 1554 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 1559 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_rxbuf(target, packet, target 1560 
drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->endpoint[packet->endpoint]); target 1592 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_proc_cred_rpt(struct htc_target *target, target 1601 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->tx_lock); target 1606 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1610 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[rpt->eid]; target 1659 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_credit_distribute(target->credit_info, target 1660 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->cred_dist_list, target 1664 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->tx_lock); target 1667 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_chk_ep_txq(target); target 1670 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int htc_parse_trailer(struct htc_target *target, target 1688 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_proc_cred_rpt(target, target 1750 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int htc_proc_trailer(struct htc_target *target, target 1785 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = htc_parse_trailer(target, record, record_buf, target 1803 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_process_hdr(struct htc_target *target, target 1878 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = htc_proc_trailer(target, packet->buf + HTC_HDR_LENGTH target 1907 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint->ep_cb.rx(endpoint->target, packet); target 1910 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_bundle(struct htc_target *target, target 1917 drivers/net/wireless/ath/ath6kl/htc_mbox.c int rem_space = target->max_rx_bndl_sz; target 1921 drivers/net/wireless/ath/ath6kl/htc_mbox.c n_scat_pkt = min(n_scat_pkt, target->msg_per_bndl_max); target 1945 drivers/net/wireless/ath/ath6kl/htc_mbox.c scat_req = hif_scatter_req_get(target->dev->ar); target 1956 drivers/net/wireless/ath/ath6kl/htc_mbox.c pad_len = CALC_TXRX_PADDED_LEN(target, target 1990 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_hif_submit_scat_req(target->dev, scat_req, true); target 1996 drivers/net/wireless/ath/ath6kl/htc_mbox.c hif_scatter_req_add(target->dev->ar, scat_req); target 2003 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_process_packets(struct htc_target *target, target 2013 drivers/net/wireless/ath/ath6kl/htc_mbox.c ep = &target->endpoint[packet->endpoint]; target 2019 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_process_hdr(target, packet, lk_ahds, target 2053 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_rx_fetch(struct htc_target *target, target 2069 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->rx_bndl_enable && (get_queue_depth(rx_pktq) > 1)) { target 2075 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_bundle(target, rx_pktq, target 2105 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_packet(target, packet, target 2128 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_rxbuf(target, packet, target 2129 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->endpoint[packet->endpoint]); target 2134 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_rxbuf(target, packet, target 2135 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->endpoint[packet->endpoint]); target 2141 drivers/net/wireless/ath/ath6kl/htc_mbox.c int ath6kl_htc_rxmsg_pending_handler(struct htc_target 
*target, target 2168 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[id]; target 2184 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_alloc(target, look_aheads, target 2195 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->chk_irq_status_cnt = 1; target 2201 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_fetch(target, &rx_pktq, &comp_pktq); target 2207 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_rx_process_packets(target, &comp_pktq, target 2221 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->chk_irq_status_cnt = 1; target 2232 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_rxbuf(target, packets, target 2233 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->endpoint[packets->endpoint]); target 2236 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->htc_flags & HTC_OP_STATE_STOPPING) { target 2238 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_rx_control(target->dev, false); target 2246 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->rx_st_flags & HTC_RECV_WAIT_BUFFERS) { target 2248 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_rx_control(target->dev, false); target 2260 drivers/net/wireless/ath/ath6kl/htc_mbox.c static struct htc_packet *htc_wait_for_ctrl_msg(struct htc_target *target) target 2266 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (ath6kl_hif_poll_mboxmsg_rx(target->dev, &look_ahead, target 2278 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet = htc_get_control_buf(target, false); target 2294 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (ath6kl_htc_rx_packet(target, packet, packet->act_len)) target 2301 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet->status = ath6kl_htc_rx_process_hdr(target, packet, NULL, NULL); target 2314 drivers/net/wireless/ath/ath6kl/htc_mbox.c reclaim_rx_ctrl_buf(target, packet); target 2320 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_add_rxbuf_multiple(struct htc_target *target, target 2342 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[first_pkt->endpoint]; target 2344 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->htc_flags & HTC_OP_STATE_STOPPING) { target 2357 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); target 2362 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->rx_st_flags & HTC_RECV_WAIT_BUFFERS) { target 2363 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->ep_waiting == first_pkt->endpoint) { target 2366 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->ep_waiting); target 2367 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->rx_st_flags &= ~HTC_RECV_WAIT_BUFFERS; target 2368 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->ep_waiting = ENDPOINT_MAX; target 2373 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 2375 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (rx_unblock && !(target->htc_flags & HTC_OP_STATE_STOPPING)) target 2377 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_rx_control(target->dev, true); target 2382 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_flush_rx_buf(struct htc_target *target) target 2389 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[i]; target 2394 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); target 2398 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 2418 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); 
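The HTC_RECV_WAIT_BUFFERS / ep_waiting entries above form a small handshake: when an endpoint runs out of receive buffers the rx path records which endpoint it is waiting on, and add_rxbuf_multiple() clears the flag and re-enables rx once buffers for that endpoint arrive. A simplified model of that state machine follows; the constant values, field names, and the omission of rx_lock locking are assumptions for illustration, not the driver's definitions.

#include <stdbool.h>

#define HTC_RECV_WAIT_BUFFERS	0x1	/* assumed value, for illustration */
#define ENDPOINT_MAX		8	/* assumed value, for illustration */

struct rx_wait_state {
	unsigned int rx_st_flags;
	int ep_waiting;
};

/* Receive side ran out of buffers for endpoint "eid". */
static void rx_buffers_exhausted(struct rx_wait_state *st, int eid)
{
	st->rx_st_flags |= HTC_RECV_WAIT_BUFFERS;
	st->ep_waiting = eid;
}

/* Buffers were queued for "eid"; returns true if rx may be unblocked. */
static bool rx_buffers_added(struct rx_wait_state *st, int eid)
{
	if ((st->rx_st_flags & HTC_RECV_WAIT_BUFFERS) &&
	    st->ep_waiting == eid) {
		st->rx_st_flags &= ~HTC_RECV_WAIT_BUFFERS;
		st->ep_waiting = ENDPOINT_MAX;
		return true;
	}
	return false;
}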
target 2420 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 2424 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_conn_service(struct htc_target *target, target 2440 drivers/net/wireless/ath/ath6kl/htc_mbox.c target, conn_req->svc_id); target 2448 drivers/net/wireless/ath/ath6kl/htc_mbox.c tx_pkt = htc_get_control_buf(target, true); target 2466 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_tx_issue(target, tx_pkt); target 2472 drivers/net/wireless/ath/ath6kl/htc_mbox.c rx_pkt = htc_wait_for_ctrl_msg(target); target 2507 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[assigned_ep]; target 2529 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint->cred_dist.cred_sz = target->tgt_cred_sz; target 2551 drivers/net/wireless/ath/ath6kl/htc_mbox.c conn_req->max_rxmsg_sz / target->tgt_cred_sz; target 2554 drivers/net/wireless/ath/ath6kl/htc_mbox.c max_msg_sz / target->tgt_cred_sz; target 2564 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_reclaim_txctrl_buf(target, tx_pkt); target 2568 drivers/net/wireless/ath/ath6kl/htc_mbox.c reclaim_rx_ctrl_buf(target, rx_pkt); target 2574 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void reset_ep_state(struct htc_target *target) target 2580 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint = &target->endpoint[i]; target 2589 drivers/net/wireless/ath/ath6kl/htc_mbox.c endpoint->target = target; target 2594 drivers/net/wireless/ath/ath6kl/htc_mbox.c INIT_LIST_HEAD(&target->cred_dist_list); target 2597 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_get_rxbuf_num(struct htc_target *target, target 2602 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->rx_lock); target 2603 drivers/net/wireless/ath/ath6kl/htc_mbox.c num = get_queue_depth(&(target->endpoint[endpoint].rx_bufq)); target 2604 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->rx_lock); target 2608 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void htc_setup_msg_bndl(struct htc_target *target) target 2611 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max = min(HTC_HOST_MAX_MSG_PER_BUNDLE, target 2612 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max); target 2614 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (ath6kl_hif_enable_scatter(target->dev->ar)) { target 2615 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max = 0; target 2620 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max = min(target->max_scat_entries, target 2621 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max); target 2625 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max); target 2628 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->max_rx_bndl_sz = target->max_xfer_szper_scatreq; target 2630 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->max_tx_bndl_sz = min(HIF_MBOX0_EXT_WIDTH, target 2631 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->max_xfer_szper_scatreq); target 2634 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->max_rx_bndl_sz, target->max_tx_bndl_sz); target 2636 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->max_tx_bndl_sz) target 2638 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tx_bndl_mask = (1 << WMM_NUM_AC) - 1; target 2640 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->max_rx_bndl_sz) target 2641 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->rx_bndl_enable = true; target 2643 drivers/net/wireless/ath/ath6kl/htc_mbox.c 
if ((target->tgt_cred_sz % target->block_sz) != 0) { target 2645 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tgt_cred_sz); target 2653 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tx_bndl_mask = 0; target 2657 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_wait_target(struct htc_target *target) target 2666 drivers/net/wireless/ath/ath6kl/htc_mbox.c packet = htc_wait_for_ctrl_msg(target); target 2685 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tgt_creds = le16_to_cpu(rdy_msg->ver2_0_info.cred_cnt); target 2686 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tgt_cred_sz = le16_to_cpu(rdy_msg->ver2_0_info.cred_sz); target 2690 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tgt_creds, target->tgt_cred_sz); target 2695 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->htc_tgt_ver = rdy_msg->htc_ver; target 2696 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max = rdy_msg->msg_per_htc_bndl; target 2699 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->htc_tgt_ver = HTC_VERSION_2P0; target 2700 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->msg_per_bndl_max = 0; target 2704 drivers/net/wireless/ath/ath6kl/htc_mbox.c (target->htc_tgt_ver == HTC_VERSION_2P0) ? "2.0" : ">= 2.1", target 2705 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->htc_tgt_ver); target 2707 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (target->msg_per_bndl_max > 0) target 2708 drivers/net/wireless/ath/ath6kl/htc_mbox.c htc_setup_msg_bndl(target); target 2720 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_mbox_conn_service((void *)target, &connect, &resp); target 2727 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_cleanup_scatter(target->dev->ar); target 2732 drivers/net/wireless/ath/ath6kl/htc_mbox.c reclaim_rx_ctrl_buf(target, packet); target 2742 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_mbox_start(struct htc_target *target) target 2747 drivers/net/wireless/ath/ath6kl/htc_mbox.c memset(&target->dev->irq_proc_reg, 0, target 2748 drivers/net/wireless/ath/ath6kl/htc_mbox.c sizeof(target->dev->irq_proc_reg)); target 2751 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_disable_intrs(target->dev); target 2753 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->htc_flags = 0; target 2754 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->rx_st_flags = 0; target 2757 drivers/net/wireless/ath/ath6kl/htc_mbox.c while ((packet = htc_get_control_buf(target, false)) != NULL) { target 2758 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = htc_add_rxbuf(target, packet); target 2764 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_credit_init(target->credit_info, &target->cred_dist_list, target 2765 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->tgt_creds); target 2767 drivers/net/wireless/ath/ath6kl/htc_mbox.c dump_cred_dist_stats(target); target 2770 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = htc_setup_tx_complete(target); target 2776 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_hif_unmask_intrs(target->dev); target 2779 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_mbox_stop(target); target 2784 drivers/net/wireless/ath/ath6kl/htc_mbox.c static int ath6kl_htc_reset(struct htc_target *target) target 2790 drivers/net/wireless/ath/ath6kl/htc_mbox.c reset_ep_state(target); target 2792 drivers/net/wireless/ath/ath6kl/htc_mbox.c block_size = target->dev->ar->mbox_info.block_size; target 2814 drivers/net/wireless/ath/ath6kl/htc_mbox.c list_add_tail(&packet->list, 
&target->free_ctrl_rxbuf); target 2816 drivers/net/wireless/ath/ath6kl/htc_mbox.c list_add_tail(&packet->list, &target->free_ctrl_txbuf); target 2824 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_stop(struct htc_target *target) target 2826 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_bh(&target->htc_lock); target 2827 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->htc_flags |= HTC_OP_STATE_STOPPING; target 2828 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_unlock_bh(&target->htc_lock); target 2835 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_mask_intrs(target->dev); target 2837 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_flush_txep_all(target); target 2839 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_mbox_flush_rx_buf(target); target 2841 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_reset(target); target 2846 drivers/net/wireless/ath/ath6kl/htc_mbox.c struct htc_target *target = NULL; target 2849 drivers/net/wireless/ath/ath6kl/htc_mbox.c target = kzalloc(sizeof(*target), GFP_KERNEL); target 2850 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (!target) { target 2855 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->dev = kzalloc(sizeof(*target->dev), GFP_KERNEL); target 2856 drivers/net/wireless/ath/ath6kl/htc_mbox.c if (!target->dev) { target 2858 drivers/net/wireless/ath/ath6kl/htc_mbox.c kfree(target); target 2862 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_init(&target->htc_lock); target 2863 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_init(&target->rx_lock); target 2864 drivers/net/wireless/ath/ath6kl/htc_mbox.c spin_lock_init(&target->tx_lock); target 2866 drivers/net/wireless/ath/ath6kl/htc_mbox.c INIT_LIST_HEAD(&target->free_ctrl_txbuf); target 2867 drivers/net/wireless/ath/ath6kl/htc_mbox.c INIT_LIST_HEAD(&target->free_ctrl_rxbuf); target 2868 drivers/net/wireless/ath/ath6kl/htc_mbox.c INIT_LIST_HEAD(&target->cred_dist_list); target 2870 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->dev->ar = ar; target 2871 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->dev->htc_cnxt = target; target 2872 drivers/net/wireless/ath/ath6kl/htc_mbox.c target->ep_waiting = ENDPOINT_MAX; target 2874 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_hif_setup(target->dev); target 2878 drivers/net/wireless/ath/ath6kl/htc_mbox.c status = ath6kl_htc_reset(target); target 2882 drivers/net/wireless/ath/ath6kl/htc_mbox.c return target; target 2885 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_htc_mbox_cleanup(target); target 2891 drivers/net/wireless/ath/ath6kl/htc_mbox.c static void ath6kl_htc_mbox_cleanup(struct htc_target *target) target 2895 drivers/net/wireless/ath/ath6kl/htc_mbox.c ath6kl_hif_cleanup_scatter(target->dev->ar); target 2898 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->free_ctrl_txbuf, list) { target 2905 drivers/net/wireless/ath/ath6kl/htc_mbox.c &target->free_ctrl_rxbuf, list) { target 2911 drivers/net/wireless/ath/ath6kl/htc_mbox.c kfree(target->dev); target 2912 drivers/net/wireless/ath/ath6kl/htc_mbox.c kfree(target); target 56 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->ep_cb.tx_comp_multi(ep->target, queue_to_indicate); target 72 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->ep_cb.tx_complete(ep->target, packet); target 77 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void send_packet_completion(struct htc_target *target, target 80 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct htc_endpoint *ep = &target->endpoint[packet->endpoint]; target 91 
drivers/net/wireless/ath/ath6kl/htc_pipe.c static void get_htc_packet_credit_based(struct htc_target *target, target 118 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (transfer_len <= target->tgt_cred_sz) { target 122 drivers/net/wireless/ath/ath6kl/htc_pipe.c credits_required = transfer_len / target->tgt_cred_sz; target 123 drivers/net/wireless/ath/ath6kl/htc_pipe.c remainder = transfer_len % target->tgt_cred_sz; target 173 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void get_htc_packet(struct htc_target *target, target 203 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int htc_issue_packets(struct htc_target *target, target 247 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 252 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 254 drivers/net/wireless/ath/ath6kl/htc_pipe.c status = ath6kl_hif_pipe_send(target->dev->ar, target 268 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 273 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 293 drivers/net/wireless/ath/ath6kl/htc_pipe.c send_packet_completion(target, packet); target 300 drivers/net/wireless/ath/ath6kl/htc_pipe.c static enum htc_send_queue_result htc_try_send(struct htc_target *target, target 306 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct ath6kl *ar = target->dev->ar; target 329 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 331 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 387 drivers/net/wireless/ath/ath6kl/htc_pipe.c action = ep->ep_cb.tx_full(ep->target, packet); target 418 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 424 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 440 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 462 drivers/net/wireless/ath/ath6kl/htc_pipe.c get_htc_packet_credit_based(target, ep, &send_queue); target 468 drivers/net/wireless/ath/ath6kl/htc_pipe.c get_htc_packet(target, ep, &send_queue, tx_resources); target 479 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 482 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_issue_packets(target, ep, &send_queue); target 490 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 495 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 529 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void htc_free_txctrl_packet(struct htc_target *target, target 535 drivers/net/wireless/ath/ath6kl/htc_pipe.c static struct htc_packet *htc_alloc_txctrl_packet(struct htc_target *target) target 540 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void htc_txctrl_complete(struct htc_target *target, target 543 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_free_txctrl_packet(target, packet); target 548 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int htc_setup_target_buffer_assignments(struct htc_target *target) target 554 drivers/net/wireless/ath/ath6kl/htc_pipe.c credit_per_maxmsg = MAX_MESSAGE_SIZE / target->tgt_cred_sz; target 555 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (MAX_MESSAGE_SIZE % target->tgt_cred_sz) target 560 drivers/net/wireless/ath/ath6kl/htc_pipe.c credits = target->tgt_creds; target 561 drivers/net/wireless/ath/ath6kl/htc_pipe.c entry = &target->pipe.txcredit_alloc[0]; target 639 drivers/net/wireless/ath/ath6kl/htc_pipe.c if 
(target->pipe.txcredit_alloc[i].service_id != 0) { target 643 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.txcredit_alloc[i]. target 645 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.txcredit_alloc[i]. target 654 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void htc_process_credit_report(struct htc_target *target, target 663 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 668 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 672 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[rpt->eid]; target 676 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 677 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_try_send(target, ep, NULL); target 678 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 687 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 691 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void htc_flush_tx_endpoint(struct htc_target *target, target 696 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 701 drivers/net/wireless/ath/ath6kl/htc_pipe.c send_packet_completion(target, packet); target 703 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 713 drivers/net/wireless/ath/ath6kl/htc_pipe.c static struct htc_packet *htc_lookup_tx_packet(struct htc_target *target, target 719 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->tx_lock); target 737 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->tx_lock); target 744 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct htc_target *target = ar->htc_target; target 755 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[ep_id]; target 757 drivers/net/wireless/ath/ath6kl/htc_pipe.c packet = htc_lookup_tx_packet(target, ep, skb); target 764 drivers/net/wireless/ath/ath6kl/htc_pipe.c send_packet_completion(target, packet); target 774 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_try_send(target, ep, NULL); target 780 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int htc_send_packets_multiple(struct htc_target *target, target 796 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[packet->endpoint]; target 798 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_try_send(target, ep, pkt_queue); target 813 drivers/net/wireless/ath/ath6kl/htc_pipe.c static struct htc_packet *alloc_htc_packet_container(struct htc_target *target) target 816 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 818 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target->pipe.htc_packet_pool == NULL) { target 819 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 823 drivers/net/wireless/ath/ath6kl/htc_pipe.c packet = target->pipe.htc_packet_pool; target 824 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.htc_packet_pool = (struct htc_packet *) packet->list.next; target 826 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 832 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void free_htc_packet_container(struct htc_target *target, target 837 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 839 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target->pipe.htc_packet_pool == NULL) { target 840 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.htc_packet_pool = packet; target 843 
drivers/net/wireless/ath/ath6kl/htc_pipe.c lh = (struct list_head *) target->pipe.htc_packet_pool; target 845 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.htc_packet_pool = packet; target 848 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 851 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int htc_process_trailer(struct htc_target *target, u8 *buffer, target 890 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_process_credit_report(target, report, target 924 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->ep_cb.rx(ep->target, packet); target 930 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void recv_packet_completion(struct htc_target *target, target 945 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct htc_target *target = ar->htc_target; target 963 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (WARN_ON_ONCE(!target)) { target 982 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[htc_hdr->eid]; target 1012 drivers/net/wireless/ath/ath6kl/htc_pipe.c status = htc_process_trailer(target, trailer, hdr_info, target 1025 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target->htc_flags & HTC_OP_STATE_SETUP_COMPLETE) { target 1042 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1044 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.ctrl_response_valid = true; target 1045 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.ctrl_response_len = min_t(int, netlen, target 1047 drivers/net/wireless/ath/ath6kl/htc_pipe.c memcpy(target->pipe.ctrl_response_buf, netdata, target 1048 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.ctrl_response_len); target 1050 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1063 drivers/net/wireless/ath/ath6kl/htc_pipe.c packet = alloc_htc_packet_container(target); target 1083 drivers/net/wireless/ath/ath6kl/htc_pipe.c recv_packet_completion(target, ep, packet); target 1086 drivers/net/wireless/ath/ath6kl/htc_pipe.c free_htc_packet_container(target, packet); target 1095 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void htc_flush_rx_queue(struct htc_target *target, target 1101 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1111 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1125 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1128 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1132 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int htc_wait_recv_ctrl_message(struct htc_target *target) target 1137 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1139 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target->pipe.ctrl_response_valid) { target 1140 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.ctrl_response_valid = false; target 1141 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1145 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1172 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void reset_endpoint_states(struct htc_target *target) target 1178 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[i]; target 1186 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->target = target; target 1192 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int htc_config_target_hif_pipe(struct htc_target *target) target 1198 
drivers/net/wireless/ath/ath6kl/htc_pipe.c static u8 htc_get_credit_alloc(struct htc_target *target, u16 service_id) target 1204 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target->pipe.txcredit_alloc[i].service_id == service_id) target 1206 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.txcredit_alloc[i].credit_alloc; target 1218 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_conn_service(struct htc_target *target, target 1222 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct ath6kl *ar = target->dev->ar; target 1248 drivers/net/wireless/ath/ath6kl/htc_pipe.c tx_alloc = htc_get_credit_alloc(target, conn_req->svc_id); target 1255 drivers/net/wireless/ath/ath6kl/htc_pipe.c packet = htc_alloc_txctrl_packet(target); target 1294 drivers/net/wireless/ath/ath6kl/htc_pipe.c status = ath6kl_htc_pipe_tx(target, packet); target 1302 drivers/net/wireless/ath/ath6kl/htc_pipe.c status = htc_wait_recv_ctrl_message(target); target 1310 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.ctrl_response_buf; target 1313 drivers/net/wireless/ath/ath6kl/htc_pipe.c (target->pipe.ctrl_response_len < sizeof(*resp_msg))) { target 1352 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[assigned_epid]; target 1369 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->cred_dist.cred_sz = target->tgt_cred_sz; target 1370 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep->cred_dist.cred_per_msg = max_msg_size / target->tgt_cred_sz; target 1371 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (max_msg_size % target->tgt_cred_sz) target 1400 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_free_txctrl_packet(target, packet); target 1409 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct htc_target *target = NULL; target 1413 drivers/net/wireless/ath/ath6kl/htc_pipe.c target = kzalloc(sizeof(struct htc_target), GFP_KERNEL); target 1414 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target == NULL) { target 1420 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_init(&target->htc_lock); target 1421 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_init(&target->rx_lock); target 1422 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_init(&target->tx_lock); target 1424 drivers/net/wireless/ath/ath6kl/htc_pipe.c reset_endpoint_states(target); target 1430 drivers/net/wireless/ath/ath6kl/htc_pipe.c free_htc_packet_container(target, packet); target 1433 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->dev = kzalloc(sizeof(*target->dev), GFP_KERNEL); target 1434 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (!target->dev) { target 1439 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->dev->ar = ar; target 1440 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->dev->htc_cnxt = target; target 1443 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[ENDPOINT_0]; target 1448 drivers/net/wireless/ath/ath6kl/htc_pipe.c return target; target 1452 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target != NULL) target 1453 drivers/net/wireless/ath/ath6kl/htc_pipe.c ath6kl_htc_pipe_cleanup(target); target 1455 drivers/net/wireless/ath/ath6kl/htc_pipe.c target = NULL; target 1457 drivers/net/wireless/ath/ath6kl/htc_pipe.c return target; target 1461 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void ath6kl_htc_pipe_cleanup(struct htc_target *target) target 1466 drivers/net/wireless/ath/ath6kl/htc_pipe.c packet = alloc_htc_packet_container(target); target 1472 drivers/net/wireless/ath/ath6kl/htc_pipe.c kfree(target->dev); target 1475 drivers/net/wireless/ath/ath6kl/htc_pipe.c 
kfree(target); target 1478 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_start(struct htc_target *target) target 1484 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_config_target_hif_pipe(target); target 1487 drivers/net/wireless/ath/ath6kl/htc_pipe.c packet = htc_alloc_txctrl_packet(target); target 1506 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->htc_flags |= HTC_OP_STATE_SETUP_COMPLETE; target 1508 drivers/net/wireless/ath/ath6kl/htc_pipe.c return ath6kl_htc_pipe_tx(target, packet); target 1511 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void ath6kl_htc_pipe_stop(struct htc_target *target) target 1518 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[i]; target 1519 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_flush_rx_queue(target, ep); target 1520 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_flush_tx_endpoint(target, ep, HTC_TX_PACKET_TAG_ALL); target 1523 drivers/net/wireless/ath/ath6kl/htc_pipe.c reset_endpoint_states(target); target 1524 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->htc_flags &= ~HTC_OP_STATE_SETUP_COMPLETE; target 1527 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_get_rxbuf_num(struct htc_target *target, target 1532 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1533 drivers/net/wireless/ath/ath6kl/htc_pipe.c num = get_queue_depth(&(target->endpoint[endpoint].rx_bufq)); target 1534 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1539 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_tx(struct htc_target *target, target 1552 drivers/net/wireless/ath/ath6kl/htc_pipe.c return htc_send_packets_multiple(target, &queue); target 1555 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_wait_target(struct htc_target *target) target 1562 drivers/net/wireless/ath/ath6kl/htc_pipe.c status = htc_wait_recv_ctrl_message(target); target 1567 drivers/net/wireless/ath/ath6kl/htc_pipe.c if (target->pipe.ctrl_response_len < sizeof(*ready_msg)) { target 1569 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->pipe.ctrl_response_len); target 1573 drivers/net/wireless/ath/ath6kl/htc_pipe.c ready_msg = (struct htc_ready_ext_msg *) target->pipe.ctrl_response_buf; target 1586 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->tgt_creds = le16_to_cpu(ready_msg->ver2_0_info.cred_cnt); target 1587 drivers/net/wireless/ath/ath6kl/htc_pipe.c target->tgt_cred_sz = le16_to_cpu(ready_msg->ver2_0_info.cred_sz); target 1589 drivers/net/wireless/ath/ath6kl/htc_pipe.c if ((target->tgt_creds == 0) || (target->tgt_cred_sz == 0)) target 1592 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_setup_target_buffer_assignments(target); target 1603 drivers/net/wireless/ath/ath6kl/htc_pipe.c status = ath6kl_htc_pipe_conn_service(target, &connect, &resp); target 1608 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void ath6kl_htc_pipe_flush_txep(struct htc_target *target, target 1611 drivers/net/wireless/ath/ath6kl/htc_pipe.c struct htc_endpoint *ep = &target->endpoint[endpoint]; target 1619 drivers/net/wireless/ath/ath6kl/htc_pipe.c htc_flush_tx_endpoint(target, ep, tag); target 1622 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_add_rxbuf_multiple(struct htc_target *target, target 1643 drivers/net/wireless/ath/ath6kl/htc_pipe.c ep = &target->endpoint[first->endpoint]; target 1645 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1650 
drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1664 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void ath6kl_htc_pipe_activity_changed(struct htc_target *target, target 1671 drivers/net/wireless/ath/ath6kl/htc_pipe.c static void ath6kl_htc_pipe_flush_rx_buf(struct htc_target *target) target 1678 drivers/net/wireless/ath/ath6kl/htc_pipe.c endpoint = &target->endpoint[i]; target 1680 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1685 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1691 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_lock_bh(&target->rx_lock); target 1694 drivers/net/wireless/ath/ath6kl/htc_pipe.c spin_unlock_bh(&target->rx_lock); target 1698 drivers/net/wireless/ath/ath6kl/htc_pipe.c static int ath6kl_htc_pipe_credit_setup(struct htc_target *target, target 734 drivers/net/wireless/ath/ath6kl/sdio.c struct htc_target *target = ar->htc_target; target 762 drivers/net/wireless/ath/ath6kl/sdio.c target->max_scat_entries = MAX_SCATTER_ENTRIES_PER_REQ; target 763 drivers/net/wireless/ath/ath6kl/sdio.c target->max_xfer_szper_scatreq = target 786 drivers/net/wireless/ath/ath6kl/sdio.c target->max_scat_entries = ATH6KL_SCATTER_ENTRIES_PER_REQ; target 787 drivers/net/wireless/ath/ath6kl/sdio.c target->max_xfer_szper_scatreq = target 586 drivers/net/wireless/ath/ath6kl/txrx.c enum htc_send_full_action ath6kl_tx_queue_full(struct htc_target *target, target 589 drivers/net/wireless/ath/ath6kl/txrx.c struct ath6kl *ar = target->dev->ar; target 617 drivers/net/wireless/ath/ath6kl/txrx.c target->endpoint[endpoint].tx_drop_packet_threshold) target 680 drivers/net/wireless/ath/ath6kl/txrx.c void ath6kl_tx_complete(struct htc_target *target, target 683 drivers/net/wireless/ath/ath6kl/txrx.c struct ath6kl *ar = target->dev->ar; target 874 drivers/net/wireless/ath/ath6kl/txrx.c void ath6kl_rx_refill(struct htc_target *target, enum htc_endpoint_id endpoint) target 876 drivers/net/wireless/ath/ath6kl/txrx.c struct ath6kl *ar = target->dev->ar; target 947 drivers/net/wireless/ath/ath6kl/txrx.c struct htc_packet *ath6kl_alloc_amsdu_rxbuf(struct htc_target *target, target 951 drivers/net/wireless/ath/ath6kl/txrx.c struct ath6kl *ar = target->dev->ar; target 1301 drivers/net/wireless/ath/ath6kl/txrx.c void ath6kl_rx(struct htc_target *target, struct htc_packet *packet) target 1303 drivers/net/wireless/ath/ath6kl/txrx.c struct ath6kl *ar = target->dev->ar; target 37 drivers/net/wireless/ath/ath9k/eeprom.c int16_t ath9k_hw_interpolate(u16 target, u16 srcLeft, u16 srcRight, target 45 drivers/net/wireless/ath/ath9k/eeprom.c rv = (int16_t) (((target - srcLeft) * targetRight + target 46 drivers/net/wireless/ath/ath9k/eeprom.c (srcRight - target) * targetLeft) / target 52 drivers/net/wireless/ath/ath9k/eeprom.c bool ath9k_hw_get_lower_upper_index(u8 target, u8 *pList, u16 listSize, target 57 drivers/net/wireless/ath/ath9k/eeprom.c if (target <= pList[0]) { target 61 drivers/net/wireless/ath/ath9k/eeprom.c if (target >= pList[listSize - 1]) { target 67 drivers/net/wireless/ath/ath9k/eeprom.c if (pList[i] == target) { target 71 drivers/net/wireless/ath/ath9k/eeprom.c if (target < pList[i + 1]) { target 674 drivers/net/wireless/ath/ath9k/eeprom.h int16_t ath9k_hw_interpolate(u16 target, u16 srcLeft, u16 srcRight, target 677 drivers/net/wireless/ath/ath9k/eeprom.h bool ath9k_hw_get_lower_upper_index(u8 target, u8 *pList, u16 listSize, target 21 drivers/net/wireless/ath/ath9k/htc_hst.c static int 
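The drivers/net/wireless/ath/ath9k/eeprom.c entries above quote the driver's two-point interpolation and its lower/upper calibration-index search. As a standalone illustration only (the equal-endpoint guard, the return-value semantics of the index search, and the sample table are assumptions, not taken from the driver), a minimal userspace C sketch of the same arithmetic:

/*
 * Standalone model of the two-point interpolation and the lower/upper
 * index search listed above for drivers/net/wireless/ath/ath9k/eeprom.c.
 * Userspace sketch for illustration; the /0 guard and the sample
 * calibration table are assumptions, not driver code.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static int16_t interpolate(uint16_t target, uint16_t srcLeft, uint16_t srcRight,
                           int16_t targetLeft, int16_t targetRight)
{
        if (srcRight == srcLeft)        /* assumed guard against divide-by-zero */
                return targetLeft;

        /* Same weighted average as the snippet quoted above. */
        return (int16_t)(((target - srcLeft) * targetRight +
                          (srcRight - target) * targetLeft) /
                         (srcRight - srcLeft));
}

/* Find the pair of table entries that bracket @target. */
static bool get_lower_upper_index(uint8_t target, const uint8_t *list, int size,
                                  int *lo, int *hi)
{
        if (target <= list[0]) {
                *lo = *hi = 0;
                return true;
        }
        if (target >= list[size - 1]) {
                *lo = *hi = size - 1;
                return true;
        }
        for (int i = 0; i < size - 1; i++) {
                if (list[i] == target) {
                        *lo = *hi = i;
                        return true;
                }
                if (target < list[i + 1]) {
                        *lo = i;
                        *hi = i + 1;
                        return false;   /* bracketed, not an exact hit */
                }
        }
        return false;
}

int main(void)
{
        const uint8_t freq[] = { 10, 20, 40, 80 };      /* made-up axis */
        const int16_t pwr[]  = { 30, 34, 40, 44 };      /* made-up values */
        int lo = 0, hi = 0;

        get_lower_upper_index(32, freq, 4, &lo, &hi);
        printf("32 -> %d\n", interpolate(32, freq[lo], freq[hi], pwr[lo], pwr[hi]));
        return 0;
}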
htc_issue_send(struct htc_target *target, struct sk_buff* skb, target 26 drivers/net/wireless/ath/ath9k/htc_hst.c struct htc_endpoint *endpoint = &target->endpoint[epid]; target 34 drivers/net/wireless/ath/ath9k/htc_hst.c status = target->hif->send(target->hif_dev, endpoint->ul_pipeid, skb); target 87 drivers/net/wireless/ath/ath9k/htc_hst.c static void htc_process_target_rdy(struct htc_target *target, target 93 drivers/net/wireless/ath/ath9k/htc_hst.c target->credit_size = be16_to_cpu(htc_ready_msg->credit_size); target 95 drivers/net/wireless/ath/ath9k/htc_hst.c endpoint = &target->endpoint[ENDPOINT0]; target 98 drivers/net/wireless/ath/ath9k/htc_hst.c atomic_inc(&target->tgt_ready); target 99 drivers/net/wireless/ath/ath9k/htc_hst.c complete(&target->target_wait); target 102 drivers/net/wireless/ath/ath9k/htc_hst.c static void htc_process_conn_rsp(struct htc_target *target, target 121 drivers/net/wireless/ath/ath9k/htc_hst.c endpoint = &target->endpoint[epid]; target 124 drivers/net/wireless/ath/ath9k/htc_hst.c tmp_endpoint = &target->endpoint[tepid]; target 140 drivers/net/wireless/ath/ath9k/htc_hst.c target->conn_rsp_epid = epid; target 141 drivers/net/wireless/ath/ath9k/htc_hst.c complete(&target->cmd_wait); target 143 drivers/net/wireless/ath/ath9k/htc_hst.c target->conn_rsp_epid = ENDPOINT_UNUSED; target 147 drivers/net/wireless/ath/ath9k/htc_hst.c static int htc_config_pipe_credits(struct htc_target *target) target 156 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "failed to allocate send buffer\n"); target 165 drivers/net/wireless/ath/ath9k/htc_hst.c cp_msg->credits = target->credits; target 167 drivers/net/wireless/ath/ath9k/htc_hst.c target->htc_flags |= HTC_OP_CONFIG_PIPE_CREDITS; target 169 drivers/net/wireless/ath/ath9k/htc_hst.c ret = htc_issue_send(target, skb, skb->len, 0, ENDPOINT0); target 173 drivers/net/wireless/ath/ath9k/htc_hst.c time_left = wait_for_completion_timeout(&target->cmd_wait, HZ); target 175 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "HTC credit config timeout\n"); target 185 drivers/net/wireless/ath/ath9k/htc_hst.c static int htc_setup_complete(struct htc_target *target) target 194 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "failed to allocate send buffer\n"); target 202 drivers/net/wireless/ath/ath9k/htc_hst.c target->htc_flags |= HTC_OP_START_WAIT; target 204 drivers/net/wireless/ath/ath9k/htc_hst.c ret = htc_issue_send(target, skb, skb->len, 0, ENDPOINT0); target 208 drivers/net/wireless/ath/ath9k/htc_hst.c time_left = wait_for_completion_timeout(&target->cmd_wait, HZ); target 210 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "HTC start timeout\n"); target 223 drivers/net/wireless/ath/ath9k/htc_hst.c int htc_init(struct htc_target *target) target 227 drivers/net/wireless/ath/ath9k/htc_hst.c ret = htc_config_pipe_credits(target); target 231 drivers/net/wireless/ath/ath9k/htc_hst.c return htc_setup_complete(target); target 234 drivers/net/wireless/ath/ath9k/htc_hst.c int htc_connect_service(struct htc_target *target, target 245 drivers/net/wireless/ath/ath9k/htc_hst.c endpoint = get_next_avail_ep(target->endpoint); target 247 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "Endpoint is not available for service %d\n", target 261 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "Failed to allocate buf to send" target 275 drivers/net/wireless/ath/ath9k/htc_hst.c ret = htc_issue_send(target, skb, skb->len, 0, ENDPOINT0); target 279 
drivers/net/wireless/ath/ath9k/htc_hst.c time_left = wait_for_completion_timeout(&target->cmd_wait, HZ); target 281 drivers/net/wireless/ath/ath9k/htc_hst.c dev_err(target->dev, "Service connection timeout for: %d\n", target 286 drivers/net/wireless/ath/ath9k/htc_hst.c *conn_rsp_epid = target->conn_rsp_epid; target 293 drivers/net/wireless/ath/ath9k/htc_hst.c int htc_send(struct htc_target *target, struct sk_buff *skb) target 298 drivers/net/wireless/ath/ath9k/htc_hst.c return htc_issue_send(target, skb, skb->len, 0, tx_ctl->epid); target 301 drivers/net/wireless/ath/ath9k/htc_hst.c int htc_send_epid(struct htc_target *target, struct sk_buff *skb, target 304 drivers/net/wireless/ath/ath9k/htc_hst.c return htc_issue_send(target, skb, skb->len, 0, epid); target 307 drivers/net/wireless/ath/ath9k/htc_hst.c void htc_stop(struct htc_target *target) target 309 drivers/net/wireless/ath/ath9k/htc_hst.c target->hif->stop(target->hif_dev); target 312 drivers/net/wireless/ath/ath9k/htc_hst.c void htc_start(struct htc_target *target) target 314 drivers/net/wireless/ath/ath9k/htc_hst.c target->hif->start(target->hif_dev); target 317 drivers/net/wireless/ath/ath9k/htc_hst.c void htc_sta_drain(struct htc_target *target, u8 idx) target 319 drivers/net/wireless/ath/ath9k/htc_hst.c target->hif->sta_drain(target->hif_dev, idx); target 469 drivers/net/wireless/ath/ath9k/htc_hst.c struct htc_target *target; target 471 drivers/net/wireless/ath/ath9k/htc_hst.c target = kzalloc(sizeof(struct htc_target), GFP_KERNEL); target 472 drivers/net/wireless/ath/ath9k/htc_hst.c if (!target) target 475 drivers/net/wireless/ath/ath9k/htc_hst.c init_completion(&target->target_wait); target 476 drivers/net/wireless/ath/ath9k/htc_hst.c init_completion(&target->cmd_wait); target 478 drivers/net/wireless/ath/ath9k/htc_hst.c target->hif = hif; target 479 drivers/net/wireless/ath/ath9k/htc_hst.c target->hif_dev = hif_handle; target 480 drivers/net/wireless/ath/ath9k/htc_hst.c target->dev = dev; target 483 drivers/net/wireless/ath/ath9k/htc_hst.c endpoint = &target->endpoint[ENDPOINT0]; target 487 drivers/net/wireless/ath/ath9k/htc_hst.c atomic_set(&target->tgt_ready, 0); target 489 drivers/net/wireless/ath/ath9k/htc_hst.c return target; target 497 drivers/net/wireless/ath/ath9k/htc_hst.c int ath9k_htc_hw_init(struct htc_target *target, target 501 drivers/net/wireless/ath/ath9k/htc_hst.c if (ath9k_htc_probe_device(target, dev, devid, product, drv_info)) { target 509 drivers/net/wireless/ath/ath9k/htc_hst.c void ath9k_htc_hw_deinit(struct htc_target *target, bool hot_unplug) target 511 drivers/net/wireless/ath/ath9k/htc_hst.c if (target) target 512 drivers/net/wireless/ath/ath9k/htc_hst.c ath9k_htc_disconnect_device(target, hot_unplug); target 205 drivers/net/wireless/ath/ath9k/htc_hst.h int htc_init(struct htc_target *target); target 206 drivers/net/wireless/ath/ath9k/htc_hst.h int htc_connect_service(struct htc_target *target, target 209 drivers/net/wireless/ath/ath9k/htc_hst.h int htc_send(struct htc_target *target, struct sk_buff *skb); target 210 drivers/net/wireless/ath/ath9k/htc_hst.h int htc_send_epid(struct htc_target *target, struct sk_buff *skb, target 212 drivers/net/wireless/ath/ath9k/htc_hst.h void htc_stop(struct htc_target *target); target 213 drivers/net/wireless/ath/ath9k/htc_hst.h void htc_start(struct htc_target *target); target 214 drivers/net/wireless/ath/ath9k/htc_hst.h void htc_sta_drain(struct htc_target *target, u8 idx); target 225 drivers/net/wireless/ath/ath9k/htc_hst.h int ath9k_htc_hw_init(struct 
htc_target *target, target 228 drivers/net/wireless/ath/ath9k/htc_hst.h void ath9k_htc_hw_deinit(struct htc_target *target, bool hot_unplug); target 627 drivers/net/wireless/broadcom/b43/phy_ht.c u8 target[3]; target 635 drivers/net/wireless/broadcom/b43/phy_ht.c target[c] = sprom->core_pwr_info[c].maxpwr_2g; target 642 drivers/net/wireless/broadcom/b43/phy_ht.c target[c] = sprom->core_pwr_info[c].maxpwr_5gl; target 649 drivers/net/wireless/broadcom/b43/phy_ht.c target[c] = sprom->core_pwr_info[c].maxpwr_5g; target 656 drivers/net/wireless/broadcom/b43/phy_ht.c target[c] = sprom->core_pwr_info[c].maxpwr_5gh; target 662 drivers/net/wireless/broadcom/b43/phy_ht.c target[0] = target[1] = target[2] = 52; target 707 drivers/net/wireless/broadcom/b43/phy_ht.c target[0] << B43_PHY_HT_TXPCTL_TARG_PWR_C1_SHIFT); target 710 drivers/net/wireless/broadcom/b43/phy_ht.c target[1] << B43_PHY_HT_TXPCTL_TARG_PWR_C2_SHIFT); target 713 drivers/net/wireless/broadcom/b43/phy_ht.c target[2] << B43_PHY_HT_TXPCTL_TARG_PWR2_C3_SHIFT); target 3551 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target, target 3559 drivers/net/wireless/broadcom/b43/phy_n.c params->tx_lpf = target.tx_lpf[core]; /* Rev 7+ */ target 3560 drivers/net/wireless/broadcom/b43/phy_n.c params->txgm = target.txgm[core]; target 3561 drivers/net/wireless/broadcom/b43/phy_n.c params->pga = target.pga[core]; target 3562 drivers/net/wireless/broadcom/b43/phy_n.c params->pad = target.pad[core]; target 3563 drivers/net/wireless/broadcom/b43/phy_n.c params->ipa = target.ipa[core]; target 3574 drivers/net/wireless/broadcom/b43/phy_n.c gain = (target.pad[core]) | (target.pga[core] << 4) | target 3575 drivers/net/wireless/broadcom/b43/phy_n.c (target.txgm[core] << 8); target 4052 drivers/net/wireless/broadcom/b43/phy_n.c s8 target[2]; target 4084 drivers/net/wireless/broadcom/b43/phy_n.c target[0] = target[1] = 52; target 4092 drivers/net/wireless/broadcom/b43/phy_n.c target[c] = sprom->core_pwr_info[c].maxpwr_2g; target 4100 drivers/net/wireless/broadcom/b43/phy_n.c target[c] = sprom->core_pwr_info[c].maxpwr_5gl; target 4108 drivers/net/wireless/broadcom/b43/phy_n.c target[c] = sprom->core_pwr_info[c].maxpwr_5g; target 4116 drivers/net/wireless/broadcom/b43/phy_n.c target[c] = sprom->core_pwr_info[c].maxpwr_5gh; target 4124 drivers/net/wireless/broadcom/b43/phy_n.c target[0] = target[1] = 52; target 4133 drivers/net/wireless/broadcom/b43/phy_n.c target[0] = ppr_max; target 4134 drivers/net/wireless/broadcom/b43/phy_n.c target[1] = ppr_max; target 4194 drivers/net/wireless/broadcom/b43/phy_n.c target[0] << B43_NPHY_TXPCTL_TPWR_0_SHIFT | target 4195 drivers/net/wireless/broadcom/b43/phy_n.c target[1] << B43_NPHY_TXPCTL_TPWR_1_SHIFT); target 4203 drivers/net/wireless/broadcom/b43/phy_n.c pwr = max(pwr, target[c] + 1); target 5011 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target; target 5025 drivers/net/wireless/broadcom/b43/phy_n.c target.ipa[i] = curr_gain[i] & 0x0007; target 5026 drivers/net/wireless/broadcom/b43/phy_n.c target.pad[i] = (curr_gain[i] & 0x00F8) >> 3; target 5027 drivers/net/wireless/broadcom/b43/phy_n.c target.pga[i] = (curr_gain[i] & 0x0F00) >> 8; target 5028 drivers/net/wireless/broadcom/b43/phy_n.c target.txgm[i] = (curr_gain[i] & 0x7000) >> 12; target 5029 drivers/net/wireless/broadcom/b43/phy_n.c target.tx_lpf[i] = (curr_gain[i] & 0x8000) >> 15; target 5031 drivers/net/wireless/broadcom/b43/phy_n.c target.ipa[i] = curr_gain[i] & 0x000F; target 5032 drivers/net/wireless/broadcom/b43/phy_n.c 
target.pad[i] = (curr_gain[i] & 0x00F0) >> 4; target 5033 drivers/net/wireless/broadcom/b43/phy_n.c target.pga[i] = (curr_gain[i] & 0x0F00) >> 8; target 5034 drivers/net/wireless/broadcom/b43/phy_n.c target.txgm[i] = (curr_gain[i] & 0x7000) >> 12; target 5036 drivers/net/wireless/broadcom/b43/phy_n.c target.ipa[i] = curr_gain[i] & 0x0003; target 5037 drivers/net/wireless/broadcom/b43/phy_n.c target.pad[i] = (curr_gain[i] & 0x000C) >> 2; target 5038 drivers/net/wireless/broadcom/b43/phy_n.c target.pga[i] = (curr_gain[i] & 0x0070) >> 4; target 5039 drivers/net/wireless/broadcom/b43/phy_n.c target.txgm[i] = (curr_gain[i] & 0x0380) >> 7; target 5058 drivers/net/wireless/broadcom/b43/phy_n.c target.ipa[i] = (table[index[i]] >> 16) & 0x7; target 5059 drivers/net/wireless/broadcom/b43/phy_n.c target.pad[i] = (table[index[i]] >> 19) & 0x1F; target 5060 drivers/net/wireless/broadcom/b43/phy_n.c target.pga[i] = (table[index[i]] >> 24) & 0xF; target 5061 drivers/net/wireless/broadcom/b43/phy_n.c target.txgm[i] = (table[index[i]] >> 28) & 0x7; target 5062 drivers/net/wireless/broadcom/b43/phy_n.c target.tx_lpf[i] = (table[index[i]] >> 31) & 0x1; target 5064 drivers/net/wireless/broadcom/b43/phy_n.c target.ipa[i] = (table[index[i]] >> 16) & 0xF; target 5065 drivers/net/wireless/broadcom/b43/phy_n.c target.pad[i] = (table[index[i]] >> 20) & 0xF; target 5066 drivers/net/wireless/broadcom/b43/phy_n.c target.pga[i] = (table[index[i]] >> 24) & 0xF; target 5067 drivers/net/wireless/broadcom/b43/phy_n.c target.txgm[i] = (table[index[i]] >> 28) & 0xF; target 5069 drivers/net/wireless/broadcom/b43/phy_n.c target.ipa[i] = (table[index[i]] >> 16) & 0x3; target 5070 drivers/net/wireless/broadcom/b43/phy_n.c target.pad[i] = (table[index[i]] >> 18) & 0x3; target 5071 drivers/net/wireless/broadcom/b43/phy_n.c target.pga[i] = (table[index[i]] >> 20) & 0x7; target 5072 drivers/net/wireless/broadcom/b43/phy_n.c target.txgm[i] = (table[index[i]] >> 23) & 0x7; target 5077 drivers/net/wireless/broadcom/b43/phy_n.c return target; target 5371 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target, target 5402 drivers/net/wireless/broadcom/b43/phy_n.c b43_nphy_iq_cal_gain_params(dev, i, target, ¶ms[i]); target 5639 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target, u8 type, bool debug) target 5670 drivers/net/wireless/broadcom/b43/phy_n.c b43_nphy_iq_cal_gain_params(dev, i, target, &cal_params[i]); target 5819 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target, u8 type, bool debug) target 5826 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target, u8 type, bool debug) target 5832 drivers/net/wireless/broadcom/b43/phy_n.c return b43_nphy_rev3_cal_rx_iq(dev, target, type, debug); target 5834 drivers/net/wireless/broadcom/b43/phy_n.c return b43_nphy_rev2_cal_rx_iq(dev, target, type, debug); target 6030 drivers/net/wireless/broadcom/b43/phy_n.c struct nphy_txgains target; target 6197 drivers/net/wireless/broadcom/b43/phy_n.c target = b43_nphy_get_tx_gains(dev); target 6209 drivers/net/wireless/broadcom/b43/phy_n.c target = b43_nphy_get_tx_gains(dev); target 6211 drivers/net/wireless/broadcom/b43/phy_n.c if (!b43_nphy_cal_tx_iq_lo(dev, target, true, false)) target 6212 drivers/net/wireless/broadcom/b43/phy_n.c if (b43_nphy_cal_rx_iq(dev, target, 2, 0) == 0) target 880 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c static int brcmf_sdio_clkctl(struct brcmf_sdio *bus, uint target, bool pendok) target 889 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c 
if (bus->clkstate == target) target 892 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c switch (target) { target 909 drivers/net/wireless/broadcom/brcm80211/brcmfmac/sdio.c bus->clkstate, target); target 2062 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_cmn.c power->target[rate] = pi->tx_power_target[rate]; target 141 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_hal.h u8 target[WL_TX_POWER_RATES]; target 948 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c #define wlc_lcnphy_set_target_tx_pwr(pi, target) \ target 951 drivers/net/wireless/broadcom/brcm80211/brcmsmac/phy/phy_lcn.c (u16)(target) << 0) target 117 drivers/net/wireless/broadcom/brcm80211/brcmsmac/stf.c if (power.target[siso_mcs_id] > (power.target[cdd_mcs_id] + 12)) target 127 drivers/net/wireless/broadcom/brcm80211/brcmsmac/stf.c if (power.target[siso_mcs_id] <= (power.target[stbc_mcs_id] + 12)) target 7580 drivers/net/wireless/intel/ipw2x00/ipw2200.c struct libipw_network *target; target 7582 drivers/net/wireless/intel/ipw2x00/ipw2200.c list_for_each_entry(target, &priv->ieee->network_list, list) { target 7584 drivers/net/wireless/intel/ipw2x00/ipw2200.c (target->last_scanned < oldest->last_scanned)) target 7585 drivers/net/wireless/intel/ipw2x00/ipw2200.c oldest = target; target 7590 drivers/net/wireless/intel/ipw2x00/ipw2200.c target = oldest; target 7592 drivers/net/wireless/intel/ipw2x00/ipw2200.c target->ssid_len, target->ssid, target 7593 drivers/net/wireless/intel/ipw2x00/ipw2200.c target->bssid); target 7594 drivers/net/wireless/intel/ipw2x00/ipw2200.c list_add_tail(&target->list, target 1534 drivers/net/wireless/intel/ipw2x00/libipw_rx.c struct libipw_network *target; target 1582 drivers/net/wireless/intel/ipw2x00/libipw_rx.c list_for_each_entry(target, &ieee->network_list, list) { target 1583 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (is_same_network(target, &network)) target 1587 drivers/net/wireless/intel/ipw2x00/libipw_rx.c time_before(target->last_scanned, oldest->last_scanned)) target 1588 drivers/net/wireless/intel/ipw2x00/libipw_rx.c oldest = target; target 1593 drivers/net/wireless/intel/ipw2x00/libipw_rx.c if (&target->list == &ieee->network_list) { target 1597 drivers/net/wireless/intel/ipw2x00/libipw_rx.c target = oldest; target 1599 drivers/net/wireless/intel/ipw2x00/libipw_rx.c target->ssid_len, target->ssid, target 1600 drivers/net/wireless/intel/ipw2x00/libipw_rx.c target->bssid); target 1603 drivers/net/wireless/intel/ipw2x00/libipw_rx.c target = list_entry(ieee->network_free_list.next, target 1615 drivers/net/wireless/intel/ipw2x00/libipw_rx.c memcpy(target, &network, sizeof(*target)); target 1616 drivers/net/wireless/intel/ipw2x00/libipw_rx.c list_add_tail(&target->list, &ieee->network_list); target 1619 drivers/net/wireless/intel/ipw2x00/libipw_rx.c target->ssid_len, target->ssid, target 1620 drivers/net/wireless/intel/ipw2x00/libipw_rx.c target->bssid, target 1623 drivers/net/wireless/intel/ipw2x00/libipw_rx.c update_network(target, &network); target 1630 drivers/net/wireless/intel/ipw2x00/libipw_rx.c ieee->handle_beacon(dev, beacon, target); target 1633 drivers/net/wireless/intel/ipw2x00/libipw_rx.c ieee->handle_probe_response(dev, beacon, target); target 140 drivers/net/wireless/intel/iwlwifi/fw/api/nvm-reg.h u8 target; target 250 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c struct iwl_tof_range_req_ap_entry_v2 *target) target 254 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c ret = iwl_mvm_ftm_target_chandef(mvm, peer, 
&target->channel_num, target 255 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c &target->bandwidth, target 256 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c &target->ctrl_ch_position); target 260 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c memcpy(target->bssid, peer->addr, ETH_ALEN); target 261 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->burst_period = target 263 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->samples_per_burst = peer->ftm.ftms_per_burst; target 264 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->num_of_bursts = peer->ftm.num_bursts_exp; target 265 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->measure_type = 0; /* regular two-sided FTM */ target 266 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->retries_per_sample = peer->ftm.ftmr_retries; target 267 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->asap_mode = peer->ftm.asap; target 268 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->enable_dyn_ack = IWL_MVM_FTM_INITIATOR_DYNACK; target 271 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->location_req |= IWL_TOF_LOC_LCI; target 273 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->location_req |= IWL_TOF_LOC_CIVIC; target 275 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->algo_type = IWL_MVM_FTM_INITIATOR_ALGO; target 280 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c #define FTM_PUT_FLAG(flag) (target->initiator_ap_flags |= \ target 285 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c struct iwl_tof_range_req_ap_entry *target) target 289 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c ret = iwl_mvm_ftm_target_chandef(mvm, peer, &target->channel_num, target 290 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c &target->bandwidth, target 291 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c &target->ctrl_ch_position); target 295 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c memcpy(target->bssid, peer->addr, ETH_ALEN); target 296 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->burst_period = target 298 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->samples_per_burst = peer->ftm.ftms_per_burst; target 299 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->num_of_bursts = peer->ftm.num_bursts_exp; target 300 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->ftmr_max_retries = peer->ftm.ftmr_retries; target 301 drivers/net/wireless/intel/iwlwifi/mvm/ftm-initiator.c target->initiator_ap_flags = cpu_to_le32(0); target 430 drivers/net/wireless/intersil/p54/eeprom.c void *source, *target; target 444 drivers/net/wireless/intersil/p54/eeprom.c target = ((struct pda_pa_curve_data *) priv->curve_data->data)->data; target 448 drivers/net/wireless/intersil/p54/eeprom.c *((__le16 *)target) = *freq; target 449 drivers/net/wireless/intersil/p54/eeprom.c target += sizeof(__le16); target 451 drivers/net/wireless/intersil/p54/eeprom.c dst = target; target 464 drivers/net/wireless/intersil/p54/eeprom.c target += sizeof(*dst); target 482 drivers/net/wireless/intersil/p54/eeprom.c void *source, *target; target 496 drivers/net/wireless/intersil/p54/eeprom.c target = ((struct pda_pa_curve_data *) priv->curve_data->data)->data; target 500 drivers/net/wireless/intersil/p54/eeprom.c *((__le16 *)target) = *freq; target 501 drivers/net/wireless/intersil/p54/eeprom.c target += sizeof(__le16); target 503 
drivers/net/wireless/intersil/p54/eeprom.c memcpy(target, source, sizeof(*src)); target 505 drivers/net/wireless/intersil/p54/eeprom.c target += sizeof(*dst); target 289 drivers/net/wireless/marvell/libertas/if_spi.c u16 target_mask, u16 target) target 299 drivers/net/wireless/marvell/libertas/if_spi.c if ((val & target_mask) == target) target 308 drivers/net/wireless/marvell/libertas/if_spi.c __func__, val, target_mask, target); target 318 drivers/net/wireless/marvell/libertas/if_spi.c static int spu_wait_for_u32(struct if_spi_card *card, u32 reg, u32 target) target 326 drivers/net/wireless/marvell/libertas/if_spi.c if (val == target) target 1403 drivers/net/wireless/realtek/rtw88/main.h bool check_hw_ready(struct rtw_dev *rtwdev, u32 addr, u32 mask, u32 target); target 9 drivers/net/wireless/realtek/rtw88/util.c bool check_hw_ready(struct rtw_dev *rtwdev, u32 addr, u32 mask, u32 target) target 14 drivers/net/wireless/realtek/rtw88/util.c if (rtw_read32_mask(rtwdev, addr, mask) == target) target 2112 drivers/net/xen-netfront.c unsigned long target; target 2117 drivers/net/xen-netfront.c target = simple_strtoul(buf, &endp, 0); target 302 drivers/nfc/microread/microread.c struct nfc_target *target, u8 comm_mode, target 308 drivers/nfc/microread/microread.c r = nfc_hci_get_param(hdev, target->hci_reader_gate, target 321 drivers/nfc/microread/microread.c r = nfc_dep_link_is_up(hdev->ndev, target->idx, comm_mode, target 336 drivers/nfc/microread/microread.c struct nfc_target *target) target 340 drivers/nfc/microread/microread.c target->supported_protocols = NFC_PROTO_NFC_DEP_MASK; target 351 drivers/nfc/microread/microread.c struct nfc_target *target) target 400 drivers/nfc/microread/microread.c struct nfc_target *target, target 408 drivers/nfc/microread/microread.c pr_info("data exchange to gate 0x%x\n", target->hci_reader_gate); target 410 drivers/nfc/microread/microread.c if (target->hci_reader_gate == MICROREAD_GATE_ID_P2P_INITIATOR) { target 413 drivers/nfc/microread/microread.c return nfc_hci_send_event(hdev, target->hci_reader_gate, target 418 drivers/nfc/microread/microread.c switch (target->hci_reader_gate) { target 441 drivers/nfc/microread/microread.c target->hci_reader_gate); target 451 drivers/nfc/microread/microread.c return nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 1636 drivers/nfc/pn533/pn533.c struct nfc_target *target, u32 protocol) target 1708 drivers/nfc/pn533/pn533.c struct nfc_target *target, u8 mode) target 1801 drivers/nfc/pn533/pn533.c static int pn533_dep_link_up(struct nfc_dev *nfc_dev, struct nfc_target *target, target 1829 drivers/nfc/pn533/pn533.c if (target && !target->nfcid2_len) { target 1850 drivers/nfc/pn533/pn533.c if (target && target->nfcid2_len) target 1851 drivers/nfc/pn533/pn533.c memcpy(skb_put(skb, NFC_NFCID3_MAXSIZE), target->nfcid2, target 1852 drivers/nfc/pn533/pn533.c target->nfcid2_len); target 2103 drivers/nfc/pn533/pn533.c struct nfc_target *target, struct sk_buff *skb, target 445 drivers/nfc/pn544/pn544.c struct nfc_target *target, u8 comm_mode, target 451 drivers/nfc/pn544/pn544.c r = nfc_hci_get_param(hdev, target->hci_reader_gate, target 467 drivers/nfc/pn544/pn544.c r = nfc_dep_link_is_up(hdev->ndev, target->idx, comm_mode, target 482 drivers/nfc/pn544/pn544.c struct nfc_target *target) target 486 drivers/nfc/pn544/pn544.c target->supported_protocols = NFC_PROTO_FELICA_MASK; target 489 drivers/nfc/pn544/pn544.c target->supported_protocols = NFC_PROTO_JEWEL_MASK; target 490 drivers/nfc/pn544/pn544.c target->sens_res = 
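The libertas if_spi.c (spu_wait_for_u16/u32) and rtw88 check_hw_ready() entries above show the same masked-register polling test, (val & mask) == target. A self-contained sketch of that pattern, with a stubbed register read and an assumed retry budget (nothing below is the drivers' code):

/*
 * Userspace sketch of the masked-register polling idiom visible in the
 * spu_wait_for_u16/u32 and check_hw_ready entries above: read a register,
 * mask it, compare against a target value until it matches or the retry
 * budget runs out.  The register read is a stub for illustration.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t fake_reg;                       /* stands in for the device */

static uint32_t read_reg(uint32_t addr)
{
        (void)addr;
        return fake_reg++;                      /* pretend the value settles */
}

static bool wait_for_masked(uint32_t addr, uint32_t mask, uint32_t target,
                            int retries)
{
        while (retries-- > 0) {
                uint32_t val = read_reg(addr);

                if ((val & mask) == target)     /* same test as the snippets */
                        return true;
                /* a real driver would udelay()/msleep() between reads */
        }
        return false;
}

int main(void)
{
        fake_reg = 0;
        printf("ready: %d\n", wait_for_masked(0x100, 0x3, 0x2, 10));
        return 0;
}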
0x0c00; target 493 drivers/nfc/pn544/pn544.c target->supported_protocols = NFC_PROTO_NFC_DEP_MASK; target 504 drivers/nfc/pn544/pn544.c struct nfc_target *target) target 512 drivers/nfc/pn544/pn544.c if (target->supported_protocols & NFC_PROTO_NFC_DEP_MASK) { target 519 drivers/nfc/pn544/pn544.c target->hci_reader_gate = PN544_RF_READER_NFCIP1_INITIATOR_GATE; target 520 drivers/nfc/pn544/pn544.c } else if (target->supported_protocols & NFC_PROTO_MIFARE_MASK) { target 521 drivers/nfc/pn544/pn544.c if (target->nfcid1_len != 4 && target->nfcid1_len != 7 && target 522 drivers/nfc/pn544/pn544.c target->nfcid1_len != 10) target 527 drivers/nfc/pn544/pn544.c target->nfcid1, target->nfcid1_len, NULL); target 528 drivers/nfc/pn544/pn544.c } else if (target->supported_protocols & NFC_PROTO_FELICA_MASK) { target 549 drivers/nfc/pn544/pn544.c target->supported_protocols = NFC_PROTO_NFC_DEP_MASK; target 550 drivers/nfc/pn544/pn544.c target->hci_reader_gate = target 558 drivers/nfc/pn544/pn544.c } else if (target->supported_protocols & NFC_PROTO_ISO14443_MASK) { target 563 drivers/nfc/pn544/pn544.c if (target->sens_res == 0x4403) /* Type 4 Mifare DESFire */ target 604 drivers/nfc/pn544/pn544.c struct nfc_target *target, target 611 drivers/nfc/pn544/pn544.c target->hci_reader_gate); target 613 drivers/nfc/pn544/pn544.c switch (target->hci_reader_gate) { target 615 drivers/nfc/pn544/pn544.c if (target->supported_protocols & NFC_PROTO_MIFARE_MASK) { target 634 drivers/nfc/pn544/pn544.c target->hci_reader_gate, target 648 drivers/nfc/pn544/pn544.c return nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 653 drivers/nfc/pn544/pn544.c return nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 659 drivers/nfc/pn544/pn544.c return nfc_hci_send_event(hdev, target->hci_reader_gate, target 683 drivers/nfc/pn544/pn544.c struct nfc_target *target) target 685 drivers/nfc/pn544/pn544.c pr_debug("supported protocol %d\b", target->supported_protocols); target 686 drivers/nfc/pn544/pn544.c if (target->supported_protocols & (NFC_PROTO_ISO14443_MASK | target 688 drivers/nfc/pn544/pn544.c return nfc_hci_send_cmd(hdev, target->hci_reader_gate, target 691 drivers/nfc/pn544/pn544.c } else if (target->supported_protocols & NFC_PROTO_MIFARE_MASK) { target 692 drivers/nfc/pn544/pn544.c if (target->nfcid1_len != 4 && target->nfcid1_len != 7 && target 693 drivers/nfc/pn544/pn544.c target->nfcid1_len != 10) target 698 drivers/nfc/pn544/pn544.c target->nfcid1, target->nfcid1_len, NULL); target 699 drivers/nfc/pn544/pn544.c } else if (target->supported_protocols & (NFC_PROTO_JEWEL_MASK | target 702 drivers/nfc/pn544/pn544.c } else if (target->supported_protocols & NFC_PROTO_NFC_DEP_MASK) { target 703 drivers/nfc/pn544/pn544.c return nfc_hci_send_cmd(hdev, target->hci_reader_gate, target 21 drivers/nfc/s3fwrn5/firmware.c __u8 target; target 570 drivers/nfc/st21nfca/core.c struct nfc_target *target) target 589 drivers/nfc/st21nfca/core.c memcpy(target->iso15693_uid, inventory_skb->data, inventory_skb->len); target 590 drivers/nfc/st21nfca/core.c target->iso15693_dsfid = inventory_skb->data[1]; target 591 drivers/nfc/st21nfca/core.c target->is_iso15693 = 1; target 598 drivers/nfc/st21nfca/core.c struct nfc_target *target, u8 comm_mode, target 603 drivers/nfc/st21nfca/core.c info->dep_info.idx = target->idx; target 618 drivers/nfc/st21nfca/core.c struct nfc_target *target) target 627 drivers/nfc/st21nfca/core.c target->supported_protocols = NFC_PROTO_FELICA_MASK; target 635 drivers/nfc/st21nfca/core.c 
target->supported_protocols = NFC_PROTO_JEWEL_MASK; target 636 drivers/nfc/st21nfca/core.c target->sens_res = 0x0c00; target 646 drivers/nfc/st21nfca/core.c target->supported_protocols = target 648 drivers/nfc/st21nfca/core.c if (target->supported_protocols == 0xffffffff) target 651 drivers/nfc/st21nfca/core.c target->sens_res = atqa; target 652 drivers/nfc/st21nfca/core.c target->sel_res = sak; target 653 drivers/nfc/st21nfca/core.c memcpy(target->nfcid1, uid, len); target 654 drivers/nfc/st21nfca/core.c target->nfcid1_len = len; target 659 drivers/nfc/st21nfca/core.c target->supported_protocols = NFC_PROTO_ISO15693_MASK; target 660 drivers/nfc/st21nfca/core.c r = st21nfca_get_iso15693_inventory(hdev, target); target 673 drivers/nfc/st21nfca/core.c struct nfc_target *target) target 698 drivers/nfc/st21nfca/core.c memcpy(target->sensf_res, nfcid_skb->data, target 700 drivers/nfc/st21nfca/core.c target->sensf_res_len = nfcid_skb->len; target 702 drivers/nfc/st21nfca/core.c if (target->sensf_res[0] == 0x01 && target 703 drivers/nfc/st21nfca/core.c target->sensf_res[1] == 0xfe) target 704 drivers/nfc/st21nfca/core.c target->supported_protocols = target 707 drivers/nfc/st21nfca/core.c target->supported_protocols = target 723 drivers/nfc/st21nfca/core.c memcpy(target->sensf_res, nfcid_skb->data, target 725 drivers/nfc/st21nfca/core.c target->sensf_res_len = nfcid_skb->len; target 726 drivers/nfc/st21nfca/core.c target->supported_protocols = NFC_PROTO_NFC_DEP_MASK; target 728 drivers/nfc/st21nfca/core.c target->hci_reader_gate = ST21NFCA_RF_READER_F_GATE; target 761 drivers/nfc/st21nfca/core.c struct nfc_target *target, target 768 drivers/nfc/st21nfca/core.c target->hci_reader_gate, skb->len); target 770 drivers/nfc/st21nfca/core.c switch (target->hci_reader_gate) { target 772 drivers/nfc/st21nfca/core.c if (target->supported_protocols == NFC_PROTO_NFC_DEP_MASK) target 776 drivers/nfc/st21nfca/core.c return nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 782 drivers/nfc/st21nfca/core.c return nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 792 drivers/nfc/st21nfca/core.c return nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 809 drivers/nfc/st21nfca/core.c struct nfc_target *target) target 813 drivers/nfc/st21nfca/core.c switch (target->hci_reader_gate) { target 823 drivers/nfc/st21nfca/core.c return nfc_hci_send_cmd(hdev, target->hci_reader_gate, target 826 drivers/nfc/st21nfca/core.c return nfc_hci_send_cmd(hdev, target->hci_reader_gate, target 519 drivers/nfc/st21nfca/dep.c struct nfc_target *target; target 544 drivers/nfc/st21nfca/dep.c target = hdev->ndev->targets; target 546 drivers/nfc/st21nfca/dep.c if (target->sensf_res_len > 0) target 547 drivers/nfc/st21nfca/dep.c memcpy(atr_req->nfcid3, target->sensf_res, target 548 drivers/nfc/st21nfca/dep.c target->sensf_res_len); target 728 drivers/nvdimm/label.c guid_t *target) target 743 drivers/nvdimm/label.c return target; target 2296 drivers/nvme/host/core.c __le64 target = cpu_to_le64(0); target 2308 drivers/nvme/host/core.c if (target) target 2309 drivers/nvme/host/core.c table->entries[state] = target; target 2345 drivers/nvme/host/core.c target = cpu_to_le64((state << 3) | target 619 drivers/nvme/target/configfs.c struct config_item *target) target 626 drivers/nvme/target/configfs.c if (target->ci_type != &nvmet_subsys_type) { target 630 drivers/nvme/target/configfs.c subsys = to_subsys(target); target 662 drivers/nvme/target/configfs.c struct config_item *target) target 665 
drivers/nvme/target/configfs.c struct nvmet_subsys *subsys = to_subsys(target); target 698 drivers/nvme/target/configfs.c struct config_item *target) target 705 drivers/nvme/target/configfs.c if (target->ci_type != &nvmet_host_type) { target 710 drivers/nvme/target/configfs.c host = to_host(target); target 740 drivers/nvme/target/configfs.c struct config_item *target) target 743 drivers/nvme/target/configfs.c struct nvmet_host *host = to_host(target); target 2262 drivers/of/base.c struct device_node **target, u32 *id_out) target 2268 drivers/of/base.c if (!np || !map_name || (!target && !id_out)) target 2273 drivers/of/base.c if (target) target 2318 drivers/of/base.c if (target) { target 2319 drivers/of/base.c if (*target) target 2322 drivers/of/base.c *target = phandle_node; target 2324 drivers/of/base.c if (*target != phandle_node) target 2338 drivers/of/base.c rid, target && *target ? *target : NULL); target 316 drivers/of/of_reserved_mem.c struct device_node *target; target 323 drivers/of/of_reserved_mem.c target = of_parse_phandle(np, "memory-region", idx); target 324 drivers/of/of_reserved_mem.c if (!target) target 327 drivers/of/of_reserved_mem.c if (!of_device_is_available(target)) { target 328 drivers/of/of_reserved_mem.c of_node_put(target); target 332 drivers/of/of_reserved_mem.c rmem = __find_rmem(target); target 333 drivers/of/of_reserved_mem.c of_node_put(target); target 53 drivers/of/overlay.c struct device_node *target; target 95 drivers/of/overlay.c struct target *target, const struct device_node *overlay_node); target 168 drivers/of/overlay.c nd.target = fragment->target; target 178 drivers/of/overlay.c of_overlay_action_name[action], ret, nd.target); target 244 drivers/of/overlay.c target_path = kasprintf(GFP_KERNEL, "%pOF", fragment->target); target 305 drivers/of/overlay.c struct target *target, struct property *overlay_prop, target 311 drivers/of/overlay.c if (target->in_livetree) target 317 drivers/of/overlay.c if (target->in_livetree) target 318 drivers/of/overlay.c prop = of_find_property(target->np, overlay_prop->name, NULL); target 326 drivers/of/overlay.c target->np); target 334 drivers/of/overlay.c target->np); target 353 drivers/of/overlay.c if (!target->in_livetree) { target 354 drivers/of/overlay.c new_prop->next = target->np->deadprops; target 355 drivers/of/overlay.c target->np->deadprops = new_prop; target 357 drivers/of/overlay.c ret = of_changeset_add_property(&ovcs->cset, target->np, target 360 drivers/of/overlay.c ret = of_changeset_update_property(&ovcs->cset, target->np, target 364 drivers/of/overlay.c if (!of_node_check_flag(target->np, OF_OVERLAY)) target 366 drivers/of/overlay.c target->np, new_prop->name); target 410 drivers/of/overlay.c struct target *target, struct device_node *node) target 415 drivers/of/overlay.c struct target target_child; target 420 drivers/of/overlay.c for_each_child_of_node(target->np, tchild) target 429 drivers/of/overlay.c tchild->parent = target->np; target 458 drivers/of/overlay.c target_child.in_livetree = target->in_livetree; target 482 drivers/of/overlay.c struct target *target, const struct device_node *overlay_node) target 489 drivers/of/overlay.c ret = add_changeset_property(ovcs, target, prop, 0); target 492 drivers/of/overlay.c target->np, prop->name, ret); target 498 drivers/of/overlay.c ret = add_changeset_node(ovcs, target, child); target 501 drivers/of/overlay.c target->np, child, ret); target 514 drivers/of/overlay.c struct target *target, target 521 drivers/of/overlay.c ret = 
add_changeset_property(ovcs, target, prop, 1); target 524 drivers/of/overlay.c target->np, prop->name, ret); target 639 drivers/of/overlay.c struct target target; target 654 drivers/of/overlay.c target.np = fragment->target; target 655 drivers/of/overlay.c target.in_livetree = true; target 656 drivers/of/overlay.c ret = build_changeset_next_level(ovcs, &target, target 660 drivers/of/overlay.c fragment->target); target 668 drivers/of/overlay.c target.np = fragment->target; target 669 drivers/of/overlay.c target.in_livetree = true; target 670 drivers/of/overlay.c ret = build_changeset_symbols_node(ovcs, &target, target 674 drivers/of/overlay.c fragment->target); target 795 drivers/of/overlay.c fragment->target = find_target(node); target 796 drivers/of/overlay.c if (!fragment->target) { target 814 drivers/of/overlay.c fragment->target = of_find_node_by_path("/__symbols__"); target 816 drivers/of/overlay.c if (!fragment->target) { target 858 drivers/of/overlay.c of_node_put(ovcs->fragments[i].target); target 301 drivers/pci/controller/pci-mvebu.c unsigned int target, unsigned int attribute, target 311 drivers/pci/controller/pci-mvebu.c ret = mvebu_mbus_add_window_remap_by_id(target, attribute, base, target 333 drivers/pci/controller/pci-mvebu.c unsigned int target, unsigned int attribute, target 356 drivers/pci/controller/pci-mvebu.c mvebu_pcie_add_windows(port, target, attribute, desired->base, target 90 drivers/pci/hotplug/pnv_php.c struct pnv_php_slot *target, *tmp; target 98 drivers/pci/hotplug/pnv_php.c target = pnv_php_match(dn, tmp); target 99 drivers/pci/hotplug/pnv_php.c if (target) target 100 drivers/pci/hotplug/pnv_php.c return target; target 142 drivers/pci/pcie/aer_inject.c u32 *target = NULL; target 149 drivers/pci/pcie/aer_inject.c target = &err->uncor_status; target 153 drivers/pci/pcie/aer_inject.c target = &err->cor_status; target 157 drivers/pci/pcie/aer_inject.c target = &err->header_log0; target 160 drivers/pci/pcie/aer_inject.c target = &err->header_log1; target 163 drivers/pci/pcie/aer_inject.c target = &err->header_log2; target 166 drivers/pci/pcie/aer_inject.c target = &err->header_log3; target 169 drivers/pci/pcie/aer_inject.c target = &err->root_status; target 173 drivers/pci/pcie/aer_inject.c target = &err->source_id; target 178 drivers/pci/pcie/aer_inject.c return target; target 1446 drivers/perf/arm-cci.c int target; target 1451 drivers/perf/arm-cci.c target = cpumask_any_but(cpu_online_mask, cpu); target 1452 drivers/perf/arm-cci.c if (target >= nr_cpu_ids) target 1455 drivers/perf/arm-cci.c perf_pmu_migrate_context(&g_cci_pmu->pmu, cpu, target); target 1456 drivers/perf/arm-cci.c g_cci_pmu->cpu = target; target 1209 drivers/perf/arm-ccn.c unsigned int target; target 1213 drivers/perf/arm-ccn.c target = cpumask_any_but(cpu_online_mask, cpu); target 1214 drivers/perf/arm-ccn.c if (target >= nr_cpu_ids) target 1216 drivers/perf/arm-ccn.c perf_pmu_migrate_context(&dt->pmu, cpu, target); target 1217 drivers/perf/arm-ccn.c dt->cpu = target; target 588 drivers/perf/arm_smmuv3_pmu.c unsigned int target; target 594 drivers/perf/arm_smmuv3_pmu.c target = cpumask_any_but(cpu_online_mask, cpu); target 595 drivers/perf/arm_smmuv3_pmu.c if (target >= nr_cpu_ids) target 598 drivers/perf/arm_smmuv3_pmu.c perf_pmu_migrate_context(&smmu_pmu->pmu, cpu, target); target 599 drivers/perf/arm_smmuv3_pmu.c smmu_pmu->on_cpu = target; target 600 drivers/perf/arm_smmuv3_pmu.c WARN_ON(irq_set_affinity_hint(smmu_pmu->irq, cpumask_of(target))); target 521 drivers/perf/fsl_imx8_ddr_perf.c 
int target; target 526 drivers/perf/fsl_imx8_ddr_perf.c target = cpumask_any_but(cpu_online_mask, cpu); target 527 drivers/perf/fsl_imx8_ddr_perf.c if (target >= nr_cpu_ids) target 530 drivers/perf/fsl_imx8_ddr_perf.c perf_pmu_migrate_context(&pmu->pmu, cpu, target); target 531 drivers/perf/fsl_imx8_ddr_perf.c pmu->cpu = target; target 411 drivers/perf/hisilicon/hisi_uncore_pmu.c unsigned int target; target 426 drivers/perf/hisilicon/hisi_uncore_pmu.c target = cpumask_any_but(&pmu_online_cpus, cpu); target 427 drivers/perf/hisilicon/hisi_uncore_pmu.c if (target >= nr_cpu_ids) target 430 drivers/perf/hisilicon/hisi_uncore_pmu.c perf_pmu_migrate_context(&hisi_pmu->pmu, cpu, target); target 432 drivers/perf/hisilicon/hisi_uncore_pmu.c hisi_pmu->on_cpu = target; target 433 drivers/perf/hisilicon/hisi_uncore_pmu.c WARN_ON(irq_set_affinity(hisi_pmu->irq, cpumask_of(target))); target 852 drivers/perf/qcom_l2_pmu.c unsigned int target; target 870 drivers/perf/qcom_l2_pmu.c target = cpumask_any_but(&cluster_online_cpus, cpu); target 871 drivers/perf/qcom_l2_pmu.c if (target >= nr_cpu_ids) { target 876 drivers/perf/qcom_l2_pmu.c perf_pmu_migrate_context(&l2cache_pmu->pmu, cpu, target); target 877 drivers/perf/qcom_l2_pmu.c cluster->on_cpu = target; target 878 drivers/perf/qcom_l2_pmu.c cpumask_set_cpu(target, &l2cache_pmu->cpumask); target 879 drivers/perf/qcom_l2_pmu.c WARN_ON(irq_set_affinity(cluster->irq, cpumask_of(target))); target 720 drivers/perf/qcom_l3_pmu.c unsigned int target; target 724 drivers/perf/qcom_l3_pmu.c target = cpumask_any_but(cpu_online_mask, cpu); target 725 drivers/perf/qcom_l3_pmu.c if (target >= nr_cpu_ids) target 727 drivers/perf/qcom_l3_pmu.c perf_pmu_migrate_context(&l3pmu->pmu, cpu, target); target 728 drivers/perf/qcom_l3_pmu.c cpumask_set_cpu(target, &l3pmu->cpumask); target 1813 drivers/perf/xgene_pmu.c unsigned int target; target 1817 drivers/perf/xgene_pmu.c target = cpumask_any_but(cpu_online_mask, cpu); target 1818 drivers/perf/xgene_pmu.c if (target >= nr_cpu_ids) target 1822 drivers/perf/xgene_pmu.c perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); target 1825 drivers/perf/xgene_pmu.c perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); target 1828 drivers/perf/xgene_pmu.c perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); target 1831 drivers/perf/xgene_pmu.c perf_pmu_migrate_context(&ctx->pmu_dev->pmu, cpu, target); target 1834 drivers/perf/xgene_pmu.c cpumask_set_cpu(target, &xgene_pmu->cpu); target 1907 drivers/power/supply/ab8500_fg.c static int ab8500_fg_battok_calc(struct ab8500_fg *di, int target) target 1909 drivers/power/supply/ab8500_fg.c if (target > BATT_OK_MIN + target 1912 drivers/power/supply/ab8500_fg.c if (target < BATT_OK_MIN) target 1914 drivers/power/supply/ab8500_fg.c return (target - BATT_OK_MIN) / BATT_OK_INCREMENT; target 244 drivers/regulator/devres.c struct regulator_supply_alias_match *target = data; target 246 drivers/regulator/devres.c return match->dev == target->dev && strcmp(match->id, target->id) == 0; target 407 drivers/regulator/devres.c struct regulator_notifier_match *target = data; target 409 drivers/regulator/devres.c return match->regulator == target->regulator && match->nb == target->nb; target 62 drivers/regulator/gpio-regulator.c int ptr, target = 0, state, best_val = INT_MAX; target 68 drivers/regulator/gpio-regulator.c target = data->states[ptr].gpios; target 78 drivers/regulator/gpio-regulator.c state = (target & (1 << ptr)) >> ptr; target 81 drivers/regulator/gpio-regulator.c data->state = 
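The drivers/perf entries above all repeat the same CPU-hotplug idiom: when the CPU servicing a PMU goes offline, pick any other online CPU with cpumask_any_but(), bail out if none remains, then migrate the perf context and IRQ affinity to it. A plain userspace model of just that selection/reassignment logic (the kernel helpers are replaced by a bitmask; nothing here is the drivers' code):

/*
 * Userspace model of the CPU-offline reassignment idiom repeated in the
 * drivers/perf entries above: when the CPU currently owning the PMU goes
 * down, pick any other online CPU and hand ownership over.  cpumask_any_but,
 * perf_pmu_migrate_context and irq affinity are modelled with a plain
 * bitmask and an integer field.
 */
#include <stdio.h>

#define NR_CPUS 8

struct fake_pmu {
        int owner_cpu;          /* CPU that services this PMU, -1 if none */
};

/* Return any online CPU other than @cpu, or NR_CPUS if there is none. */
static int any_online_but(unsigned int online_mask, int cpu)
{
        for (int i = 0; i < NR_CPUS; i++)
                if (i != cpu && (online_mask & (1u << i)))
                        return i;
        return NR_CPUS;
}

static void cpu_offline(struct fake_pmu *pmu, unsigned int *online_mask, int cpu)
{
        *online_mask &= ~(1u << cpu);

        if (pmu->owner_cpu != cpu)      /* this PMU is not affected */
                return;

        int target = any_online_but(*online_mask, cpu);
        if (target >= NR_CPUS) {        /* no CPU left to take over */
                pmu->owner_cpu = -1;
                return;
        }
        /* In the kernel this is where the perf context and IRQ would move. */
        pmu->owner_cpu = target;
}

int main(void)
{
        unsigned int online = 0x0F;             /* CPUs 0-3 online */
        struct fake_pmu pmu = { .owner_cpu = 2 };

        cpu_offline(&pmu, &online, 2);
        printf("new owner: %d\n", pmu.owner_cpu);       /* prints 0 */
        return 0;
}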
target; target 101 drivers/regulator/gpio-regulator.c int ptr, target = 0, state, best_val = 0; target 107 drivers/regulator/gpio-regulator.c target = data->states[ptr].gpios; target 115 drivers/regulator/gpio-regulator.c state = (target & (1 << ptr)) >> ptr; target 118 drivers/regulator/gpio-regulator.c data->state = target; target 145 drivers/s390/block/dasd.c device->target = DASD_STATE_NEW; target 490 drivers/s390/block/dasd.c device->target >= DASD_STATE_KNOWN) target 495 drivers/s390/block/dasd.c device->target >= DASD_STATE_BASIC) target 500 drivers/s390/block/dasd.c device->target >= DASD_STATE_READY) target 505 drivers/s390/block/dasd.c device->target > DASD_STATE_UNFMT) target 510 drivers/s390/block/dasd.c device->target >= DASD_STATE_ONLINE) target 525 drivers/s390/block/dasd.c device->target <= DASD_STATE_READY) target 530 drivers/s390/block/dasd.c device->target <= DASD_STATE_BASIC) target 535 drivers/s390/block/dasd.c device->target <= DASD_STATE_BASIC) target 540 drivers/s390/block/dasd.c device->target <= DASD_STATE_KNOWN) target 545 drivers/s390/block/dasd.c device->target <= DASD_STATE_NEW) target 558 drivers/s390/block/dasd.c if (device->state == device->target) target 561 drivers/s390/block/dasd.c if (device->state < device->target) target 568 drivers/s390/block/dasd.c device->target = device->state; target 573 drivers/s390/block/dasd.c if (device->state == device->target) target 646 drivers/s390/block/dasd.c void dasd_set_target_state(struct dasd_device *device, int target) target 651 drivers/s390/block/dasd.c if (dasd_probeonly && target > DASD_STATE_READY) target 652 drivers/s390/block/dasd.c target = DASD_STATE_READY; target 653 drivers/s390/block/dasd.c if (device->target != target) { target 654 drivers/s390/block/dasd.c if (device->state == target) target 656 drivers/s390/block/dasd.c device->target = target; target 658 drivers/s390/block/dasd.c if (device->state != device->target) target 670 drivers/s390/block/dasd.c return (device->state == device->target); target 1890 drivers/s390/block/dasd.c device->state != device->target || target 503 drivers/s390/block/dasd_int.h int state, target; target 883 drivers/scsi/FlashPoint.c unsigned char target); target 1746 drivers/scsi/FlashPoint.c unsigned char i, target; target 1912 drivers/scsi/FlashPoint.c target = target 1918 drivers/scsi/FlashPoint.c (unsigned char)(target | target << target 2195 drivers/scsi/FlashPoint.c unsigned char target) target 2201 drivers/scsi/FlashPoint.c currTar_Info = &FPT_sccbMgrTbl[p_card][target]; target 2219 drivers/scsi/FlashPoint.c target) { target 2307 drivers/scsi/FlashPoint.c unsigned char auto_loaded, i, target, *theCCB; target 2317 drivers/scsi/FlashPoint.c target = currSCCB->TargID; target 2318 drivers/scsi/FlashPoint.c currTar_Info = &FPT_sccbMgrTbl[p_card][target]; target 2405 drivers/scsi/FlashPoint.c WR_HARPOON(port + hp_select_id, target); target 2406 drivers/scsi/FlashPoint.c WR_HARPOON(port + hp_gp_reg_3, target); /* Use by new automation logic */ target 2430 drivers/scsi/FlashPoint.c FPT_sssyncv(port, target, NARROW_SCSI, currTar_Info); target 2431 drivers/scsi/FlashPoint.c FPT_SccbMgrTableInitTarget(p_card, target); target 2116 drivers/scsi/NCR5380.c int target = ffs(target_mask) - 1; target 2125 drivers/scsi/NCR5380.c hostdata->busy[target] &= ~(1 << lun); target 595 drivers/scsi/a100u2w.c static int orc_device_reset(struct orc_host * host, struct scsi_cmnd *cmd, unsigned int target) target 638 drivers/scsi/a100u2w.c scb->target = target; target 857 drivers/scsi/a100u2w.c 
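The drivers/s390/block/dasd.c entries above show a current/target state pair and a state machine that walks the device up or down until the two match, with dasd_set_target_state() clamping the target to READY in probe-only mode. A simplified userspace model of that stepping logic (the state names and the one-step transitions are stand-ins, not the driver's functions):

/*
 * Userspace model of the dasd state machine shown above: a device has a
 * current state and a target state, and the change-state path moves the
 * current state one step at a time toward the target.
 */
#include <stdio.h>

enum state { ST_NEW, ST_KNOWN, ST_BASIC, ST_READY, ST_ONLINE };

struct device {
        enum state state;
        enum state target;
};

static void change_state(struct device *d)
{
        while (d->state != d->target) {
                if (d->state < d->target)
                        d->state++;     /* "increase" path: bring device up */
                else
                        d->state--;     /* "decrease" path: tear device down */
                printf("state now %d (target %d)\n", d->state, d->target);
        }
}

static void set_target_state(struct device *d, enum state target, int probeonly)
{
        /* Mirrors the probe-only clamp visible in dasd_set_target_state(). */
        if (probeonly && target > ST_READY)
                target = ST_READY;
        if (d->target != target) {
                d->target = target;
                change_state(d);
        }
}

int main(void)
{
        struct device d = { .state = ST_NEW, .target = ST_NEW };

        set_target_state(&d, ST_ONLINE, /*probeonly=*/1);       /* stops at READY */
        set_target_state(&d, ST_NEW, 0);                        /* back down */
        return 0;
}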
scb->target = cmd->device->id; target 149 drivers/scsi/a100u2w.h u8 target; /*02 Target Id */ target 1518 drivers/scsi/aacraid/aachba.c int bus, target; target 1541 drivers/scsi/aacraid/aachba.c target = scmd_id(cmd); target 1542 drivers/scsi/aacraid/aachba.c hbacmd->it_nexus = dev->hba_map[bus][target].rmw_nexus; target 1739 drivers/scsi/aacraid/aachba.c static void aac_set_safw_target_qd(struct aac_dev *dev, int bus, int target) target 1744 drivers/scsi/aacraid/aachba.c if (dev->hba_map[bus][target].devtype != AAC_DEVTYPE_NATIVE_RAW) target 1747 drivers/scsi/aacraid/aachba.c identify_resp = dev->hba_map[bus][target].safw_identify_resp; target 1749 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].qd_limit = 32; target 1755 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].qd_limit = 32; target 1757 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].qd_limit = target 1762 drivers/scsi/aacraid/aachba.c struct aac_ciss_identify_pd **identify_resp, u32 bus, u32 target) target 1780 drivers/scsi/aacraid/aachba.c srbcmd->cdb[2] = (u8)((AAC_MAX_LUN + target) & 0x00FF); target 1886 drivers/scsi/aacraid/aachba.c int bus, int target) target 1888 drivers/scsi/aacraid/aachba.c kfree(dev->hba_map[bus][target].safw_identify_resp); target 1889 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].safw_identify_resp = NULL; target 1898 drivers/scsi/aacraid/aachba.c u32 target; target 1909 drivers/scsi/aacraid/aachba.c target = aac_get_safw_phys_target(dev, i); target 1911 drivers/scsi/aacraid/aachba.c aac_free_safw_identify_resp(dev, bus, target); target 1921 drivers/scsi/aacraid/aachba.c u32 target; target 1929 drivers/scsi/aacraid/aachba.c target = aac_get_safw_phys_target(dev, i); target 1932 drivers/scsi/aacraid/aachba.c &identify_resp, bus, target); target 1937 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].safw_identify_resp = identify_resp; target 1959 drivers/scsi/aacraid/aachba.c u32 i, bus, target; target 1969 drivers/scsi/aacraid/aachba.c target = aac_get_safw_phys_target(dev, i); target 1974 drivers/scsi/aacraid/aachba.c if (bus >= AAC_MAX_BUSES || target >= AAC_MAX_TARGETS) target 1978 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].devtype = target 1984 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].devtype = target 1986 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].rmw_nexus = target 1989 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].devtype = target 1992 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].scan_counter = dev->scan_counter; target 1994 drivers/scsi/aacraid/aachba.c aac_set_safw_target_qd(dev, bus, target); target 2032 drivers/scsi/aacraid/aachba.c u32 tmp, bus, target; target 2093 drivers/scsi/aacraid/aachba.c for (target = 0; target < AAC_MAX_TARGETS; target++) { target 2094 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].devtype = 0; target 2095 drivers/scsi/aacraid/aachba.c dev->hba_map[bus][target].qd_limit = 0; target 1842 drivers/scsi/aacraid/commsup.c static inline int is_safw_raid_volume(struct aac_dev *aac, int bus, int target) target 1844 drivers/scsi/aacraid/commsup.c return bus == CONTAINER_CHANNEL && target < aac->maximum_num_containers; target 1849 drivers/scsi/aacraid/commsup.c int target) target 1854 drivers/scsi/aacraid/commsup.c return scsi_device_lookup(dev->scsi_host_ptr, bus, target, 0); target 1857 drivers/scsi/aacraid/commsup.c static int aac_add_safw_device(struct aac_dev *dev, int bus, int target) target 1862 drivers/scsi/aacraid/commsup.c return 
scsi_add_device(dev->scsi_host_ptr, bus, target, 0); target 1871 drivers/scsi/aacraid/commsup.c static void aac_remove_safw_device(struct aac_dev *dev, int bus, int target) target 1875 drivers/scsi/aacraid/commsup.c sdev = aac_lookup_safw_scsi_device(dev, bus, target); target 1881 drivers/scsi/aacraid/commsup.c int bus, int target) target 1883 drivers/scsi/aacraid/commsup.c return dev->hba_map[bus][target].scan_counter == dev->scan_counter; target 1886 drivers/scsi/aacraid/commsup.c static int aac_is_safw_target_valid(struct aac_dev *dev, int bus, int target) target 1888 drivers/scsi/aacraid/commsup.c if (is_safw_raid_volume(dev, bus, target)) target 1889 drivers/scsi/aacraid/commsup.c return dev->fsa_dev[target].valid; target 1891 drivers/scsi/aacraid/commsup.c return aac_is_safw_scan_count_equal(dev, bus, target); target 1894 drivers/scsi/aacraid/commsup.c static int aac_is_safw_device_exposed(struct aac_dev *dev, int bus, int target) target 1899 drivers/scsi/aacraid/commsup.c sdev = aac_lookup_safw_scsi_device(dev, bus, target); target 1911 drivers/scsi/aacraid/commsup.c int target; target 1923 drivers/scsi/aacraid/commsup.c target = get_target_number(i); target 1925 drivers/scsi/aacraid/commsup.c is_exposed = aac_is_safw_device_exposed(dev, bus, target); target 1927 drivers/scsi/aacraid/commsup.c if (aac_is_safw_target_valid(dev, bus, target) && !is_exposed) target 1928 drivers/scsi/aacraid/commsup.c aac_add_safw_device(dev, bus, target); target 1929 drivers/scsi/aacraid/commsup.c else if (!aac_is_safw_target_valid(dev, bus, target) && target 1931 drivers/scsi/aacraid/commsup.c aac_remove_safw_device(dev, bus, target); target 459 drivers/scsi/aha152x.c int target; target 525 drivers/scsi/aha152x.c #define RECONN_TARGET (HOSTDATA(shpnt)->target) target 661 drivers/scsi/aha152x.c int target, int lun) target 666 drivers/scsi/aha152x.c ptr && ((ptr->device->id != target) || (ptr->device->lun != lun)); target 1573 drivers/scsi/aha152x.c int target; target 1606 drivers/scsi/aha152x.c for(target=7; !(selid & (1 << target)); target--) target 1609 drivers/scsi/aha152x.c if(selid & ~(1 << target)) { target 1615 drivers/scsi/aha152x.c SETPORT(SCSIID, (shpnt->this_id << OID_) | target); target 1618 drivers/scsi/aha152x.c SETRATE(HOSTDATA(shpnt)->syncrate[target]); target 1620 drivers/scsi/aha152x.c RECONN_TARGET=target; target 397 drivers/scsi/aha1542.c u8 target = cmd->device->id; target 419 drivers/scsi/aha1542.c target, *cmd->cmnd, i, bufflen); target 499 drivers/scsi/aha1542.c ccb[mbo].idlun = (target & 7) << 5 | direction | (lun & 7); /*SCSI Target Id */ target 858 drivers/scsi/aha1542.c u8 target = cmd->device->id; target 892 drivers/scsi/aha1542.c ccb[mbo].idlun = (target & 7) << 5 | (lun & 7); /*SCSI Target Id */ target 319 drivers/scsi/aha1740.c unchar target = scmd_id(SCpnt); target 341 drivers/scsi/aha1740.c target, *cmd, i, bufflen); target 479 drivers/scsi/aha1740.c outb(ATTN_START | (target & 7), ATTN(base)); /* Start it up */ target 106 drivers/scsi/aic7xxx/aic79xx.h #define BUILD_TCL_RAW(target, channel, lun) \ target 107 drivers/scsi/aic7xxx/aic79xx.h ((lun) | ((target) << 8)) target 117 drivers/scsi/aic7xxx/aic79xx.h #define AHD_BUILD_COL_IDX(target, lun) \ target 118 drivers/scsi/aic7xxx/aic79xx.h ((((u8)lun) << 4) | target) target 1289 drivers/scsi/aic7xxx/aic79xx.h u_int target; target 1376 drivers/scsi/aic7xxx/aic79xx.h int ahd_search_qinfifo(struct ahd_softc *ahd, int target, target 1380 drivers/scsi/aic7xxx/aic79xx.h int ahd_search_disc_list(struct ahd_softc *ahd, int 
target, target 1388 drivers/scsi/aic7xxx/aic79xx.h u_int our_id, u_int target, target 188 drivers/scsi/aic7xxx/aic79xx_core.c static int ahd_search_scb_list(struct ahd_softc *ahd, int target, target 232 drivers/scsi/aic7xxx/aic79xx_core.c static int ahd_abort_scbs(struct ahd_softc *ahd, int target, target 258 drivers/scsi/aic7xxx/aic79xx_core.c int target, char channel, int lun, target 1942 drivers/scsi/aic7xxx/aic79xx_core.c devinfo.target, target 1977 drivers/scsi/aic7xxx/aic79xx_core.c ahd_send_async(ahd, devinfo.channel, devinfo.target, target 2248 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo.channel, devinfo.target, target 3035 drivers/scsi/aic7xxx/aic79xx_core.c u_int target; target 3049 drivers/scsi/aic7xxx/aic79xx_core.c target = SCSIID_TARGET(ahd, saved_scsiid); target 3052 drivers/scsi/aic7xxx/aic79xx_core.c target, saved_lun, 'A', ROLE_INITIATOR); target 3103 drivers/scsi/aic7xxx/aic79xx_core.c found = ahd_abort_scbs(ahd, target, 'A', saved_lun, target 3118 drivers/scsi/aic7xxx/aic79xx_core.c && ahd_match_scb(ahd, scb, target, 'A', target 3148 drivers/scsi/aic7xxx/aic79xx_core.c devinfo.target, &tstate); target 3290 drivers/scsi/aic7xxx/aic79xx_core.c aborted = ahd_abort_scbs(ahd, target, 'A', target 3428 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, target 3904 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, &tstate); target 3933 drivers/scsi/aic7xxx/aic79xx_core.c ahd_send_async(ahd, devinfo->channel, devinfo->target, target 3941 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->target, target 3971 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->target, target 4041 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, &tstate); target 4055 drivers/scsi/aic7xxx/aic79xx_core.c ahd_send_async(ahd, devinfo->channel, devinfo->target, target 4059 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->target, target 4089 drivers/scsi/aic7xxx/aic79xx_core.c ahd_send_async(ahd, devinfo->channel, devinfo->target, target 4109 drivers/scsi/aic7xxx/aic79xx_core.c ahd_outb(ahd, NEGOADDR, devinfo->target); target 4241 drivers/scsi/aic7xxx/aic79xx_core.c devinfo.target, &tstate); target 4335 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 4357 drivers/scsi/aic7xxx/aic79xx_core.c ahd_compile_devinfo(struct ahd_devinfo *devinfo, u_int our_id, u_int target, target 4361 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target = target; target 4363 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target_offset = target; target 4533 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, &tstate); target 4622 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->channel, devinfo->target, target 4640 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->channel, devinfo->target, target 4670 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->channel, devinfo->target, devinfo->lun, target 5108 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, &tstate); target 5179 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun, target 5208 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5250 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun, target 5266 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun, target 5279 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5391 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5396 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5410 
drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun, target 5456 drivers/scsi/aic7xxx/aic79xx_core.c ahd_abort_scbs(ahd, devinfo->target, devinfo->channel, target 5533 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, &tstate); target 5549 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5564 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5580 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->channel, devinfo->target, devinfo->lun); target 5609 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->target, devinfo->lun); target 5619 drivers/scsi/aic7xxx/aic79xx_core.c devinfo->channel, devinfo->target, devinfo->lun); target 5625 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->channel, devinfo->target, target 5675 drivers/scsi/aic7xxx/aic79xx_core.c ahd_name(ahd), devinfo->channel, devinfo->target, target 5926 drivers/scsi/aic7xxx/aic79xx_core.c found = ahd_abort_scbs(ahd, devinfo->target, devinfo->channel, target 5971 drivers/scsi/aic7xxx/aic79xx_core.c ahd_send_async(ahd, devinfo->channel, devinfo->target, target 5976 drivers/scsi/aic7xxx/aic79xx_core.c message, devinfo->channel, devinfo->target, found); target 7246 drivers/scsi/aic7xxx/aic79xx_core.c u_int target; target 7409 drivers/scsi/aic7xxx/aic79xx_core.c for (target = 0; target < AHD_NUM_TARGETS; target++) { target 7410 drivers/scsi/aic7xxx/aic79xx_core.c ahd_outb(ahd, NEGOADDR, target); target 7416 drivers/scsi/aic7xxx/aic79xx_core.c for (target = 0; target < AHD_NUM_TARGETS; target++) { target 7422 drivers/scsi/aic7xxx/aic79xx_core.c target, &tstate); target 7424 drivers/scsi/aic7xxx/aic79xx_core.c target, CAM_LUN_WILDCARD, target 7511 drivers/scsi/aic7xxx/aic79xx_core.c for (target = 0; target < AHD_NUM_TARGETS; target++) { target 7515 drivers/scsi/aic7xxx/aic79xx_core.c ahd_unbusy_tcl(ahd, BUILD_TCL_RAW(target, 'A', lun)); target 7991 drivers/scsi/aic7xxx/aic79xx_core.c ahd_match_scb(struct ahd_softc *ahd, struct scb *scb, int target, target 8001 drivers/scsi/aic7xxx/aic79xx_core.c match = ((targ == target) || (target == CAM_TARGET_WILDCARD)); target 8029 drivers/scsi/aic7xxx/aic79xx_core.c int target; target 8033 drivers/scsi/aic7xxx/aic79xx_core.c target = SCB_GET_TARGET(ahd, scb); target 8037 drivers/scsi/aic7xxx/aic79xx_core.c ahd_search_qinfifo(ahd, target, channel, lun, target 8145 drivers/scsi/aic7xxx/aic79xx_core.c ahd_search_qinfifo(struct ahd_softc *ahd, int target, char channel, target 8208 drivers/scsi/aic7xxx/aic79xx_core.c if (ahd_match_scb(ahd, scb, target, channel, lun, tag, role)) { target 8281 drivers/scsi/aic7xxx/aic79xx_core.c if (ahd_match_scb(ahd, scb, target, channel, CAM_LUN_WILDCARD, target 8293 drivers/scsi/aic7xxx/aic79xx_core.c found += ahd_search_scb_list(ahd, target, channel, target 8302 drivers/scsi/aic7xxx/aic79xx_core.c && ahd_match_scb(ahd, mk_msg_scb, target, channel, target 8346 drivers/scsi/aic7xxx/aic79xx_core.c && ahd_match_scb(ahd, scb, target, channel, CAM_LUN_WILDCARD, target 8375 drivers/scsi/aic7xxx/aic79xx_core.c ahd_search_scb_list(struct ahd_softc *ahd, int target, char channel, target 8408 drivers/scsi/aic7xxx/aic79xx_core.c if (ahd_match_scb(ahd, scb, target, channel, target 8532 drivers/scsi/aic7xxx/aic79xx_core.c ahd_abort_scbs(struct ahd_softc *ahd, int target, char channel, target 8548 drivers/scsi/aic7xxx/aic79xx_core.c found = ahd_search_qinfifo(ahd, target, channel, lun, SCB_LIST_NULL, target 8556 drivers/scsi/aic7xxx/aic79xx_core.c if (target != CAM_TARGET_WILDCARD) { target 8557 
drivers/scsi/aic7xxx/aic79xx_core.c i = target; target 8583 drivers/scsi/aic7xxx/aic79xx_core.c || ahd_match_scb(ahd, scbp, target, channel, target 8607 drivers/scsi/aic7xxx/aic79xx_core.c if (ahd_match_scb(ahd, scbp, target, channel, lun, tag, role)) { target 8622 drivers/scsi/aic7xxx/aic79xx_core.c ahd_platform_abort_scbs(ahd, target, channel, lun, tag, role, status); target 8662 drivers/scsi/aic7xxx/aic79xx_core.c u_int target; target 8778 drivers/scsi/aic7xxx/aic79xx_core.c for (target = 0; target <= max_scsiid; target++) { target 8782 drivers/scsi/aic7xxx/aic79xx_core.c tstate = ahd->enabled_targets[target]; target 8801 drivers/scsi/aic7xxx/aic79xx_core.c for (target = 0; target <= max_scsiid; target++) { target 8803 drivers/scsi/aic7xxx/aic79xx_core.c if (ahd->enabled_targets[target] == NULL) target 8808 drivers/scsi/aic7xxx/aic79xx_core.c ahd_compile_devinfo(&devinfo, target, initiator, target 9000 drivers/scsi/aic7xxx/aic79xx_core.c devinfo.target, target 10274 drivers/scsi/aic7xxx/aic79xx_core.c u_int target; target 10346 drivers/scsi/aic7xxx/aic79xx_core.c target = ccb->ccb_h.target_id; target 10349 drivers/scsi/aic7xxx/aic79xx_core.c target_mask = 0x01 << target; target 10379 drivers/scsi/aic7xxx/aic79xx_core.c if (target != CAM_TARGET_WILDCARD && tstate == NULL) { target 10380 drivers/scsi/aic7xxx/aic79xx_core.c tstate = ahd_alloc_tstate(ahd, target, channel); target 10410 drivers/scsi/aic7xxx/aic79xx_core.c if (target != CAM_TARGET_WILDCARD) { target 10432 drivers/scsi/aic7xxx/aic79xx_core.c if (target != our_id) { target 10443 drivers/scsi/aic7xxx/aic79xx_core.c ahd->our_id = target; target 10449 drivers/scsi/aic7xxx/aic79xx_core.c ahd_outb(ahd, SCSIID, target); target 10518 drivers/scsi/aic7xxx/aic79xx_core.c if (target != CAM_TARGET_WILDCARD) { target 10528 drivers/scsi/aic7xxx/aic79xx_core.c ahd_free_tstate(ahd, target, channel, target 10665 drivers/scsi/aic7xxx/aic79xx_core.c int target; target 10669 drivers/scsi/aic7xxx/aic79xx_core.c target = SCSIID_OUR_ID(cmd->scsiid); target 10673 drivers/scsi/aic7xxx/aic79xx_core.c tstate = ahd->enabled_targets[target]; target 10696 drivers/scsi/aic7xxx/aic79xx_core.c initiator, target, lun, target 10703 drivers/scsi/aic7xxx/aic79xx_core.c atio->ccb_h.target_id = target; target 10760 drivers/scsi/aic7xxx/aic79xx_core.c initiator, target, lun, ahd->pending_device); target 1486 drivers/scsi/aic7xxx/aic79xx_osm.c ahd_platform_abort_scbs(struct ahd_softc *ahd, int target, char channel, target 1542 drivers/scsi/aic7xxx/aic79xx_osm.c ahd_send_async(ahd, devinfo.channel, devinfo.target, target 1548 drivers/scsi/aic7xxx/aic79xx_osm.c ahd_send_async(ahd, devinfo.channel, devinfo.target, target 1687 drivers/scsi/aic7xxx/aic79xx_osm.c u_int target, u_int lun, ac_code code) target 1697 drivers/scsi/aic7xxx/aic79xx_osm.c BUG_ON(target == CAM_TARGET_WILDCARD); target 1700 drivers/scsi/aic7xxx/aic79xx_osm.c target, &tstate); target 1717 drivers/scsi/aic7xxx/aic79xx_osm.c starget = ahd->platform_data->starget[target]; target 1756 drivers/scsi/aic7xxx/aic79xx_osm.c channel - 'A', target); target 665 drivers/scsi/aic7xxx/aic79xx_osm.h int ahd_platform_abort_scbs(struct ahd_softc *ahd, int target, target 672 drivers/scsi/aic7xxx/aic79xx_osm.h u_int target, u_int lun, ac_code); target 1097 drivers/scsi/aic7xxx/aic7xxx.h u_int target; target 1152 drivers/scsi/aic7xxx/aic7xxx.h int target, char channel, int lun, target 1178 drivers/scsi/aic7xxx/aic7xxx.h int ahc_search_qinfifo(struct ahc_softc *ahc, int target, target 1184 drivers/scsi/aic7xxx/aic7xxx.h 
int target, char channel, target 1187 drivers/scsi/aic7xxx/aic7xxx.h int ahc_search_disc_list(struct ahc_softc *ahc, int target, target 1196 drivers/scsi/aic7xxx/aic7xxx.h u_int our_id, u_int target, target 261 drivers/scsi/aic7xxx/aic7xxx_core.c static int ahc_abort_scbs(struct ahc_softc *ahc, int target, target 1070 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo.target, target 1160 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo.channel, devinfo.target); target 1195 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo.channel, devinfo.target, rejbyte); target 1216 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo.channel, devinfo.target, target 1227 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo.channel, devinfo.target, target 1450 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo.channel, devinfo.target, target 1778 drivers/scsi/aic7xxx/aic7xxx_core.c u_int target; target 1810 drivers/scsi/aic7xxx/aic7xxx_core.c target = SCSIID_TARGET(ahc, saved_scsiid); target 1814 drivers/scsi/aic7xxx/aic7xxx_core.c target, saved_lun, channel, ROLE_INITIATOR); target 1830 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_abort_scbs(ahc, target, channel, target 1845 drivers/scsi/aic7xxx/aic7xxx_core.c && ahc_match_scb(ahc, scb, target, channel, target 1854 drivers/scsi/aic7xxx/aic7xxx_core.c target, target 1875 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo.target, target 1921 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_abort_scbs(ahc, target, channel, target 1973 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, target 2507 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, &tstate); target 2580 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_send_async(ahc, devinfo->channel, devinfo->target, target 2586 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, syncrate->rate, target 2592 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo->target); target 2625 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, &tstate); target 2650 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_send_async(ahc, devinfo->channel, devinfo->target, target 2654 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo->target, target 2675 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_send_async(ahc, devinfo->channel, devinfo->target, target 2707 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo.target, &tstate); target 2813 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_compile_devinfo(struct ahc_devinfo *devinfo, u_int our_id, u_int target, target 2817 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target = target; target 2819 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target_offset = target; target 2831 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 2973 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, &tstate); target 3063 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo->channel, devinfo->target, target 3081 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo->channel, devinfo->target, target 3104 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->channel, devinfo->target, devinfo->lun, target 3590 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, &tstate); target 3665 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun, target 3695 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 3737 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun, target 3753 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun, target 3766 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 3884 
drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 3889 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 3903 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun, target 3949 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_abort_scbs(ahc, devinfo->target, devinfo->channel, target 4018 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, &tstate); target 4031 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 4046 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->channel, devinfo->target, devinfo->lun); target 4075 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->target, devinfo->lun); target 4085 drivers/scsi/aic7xxx/aic7xxx_core.c devinfo->channel, devinfo->target, devinfo->lun); target 4091 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo->channel, devinfo->target, target 4140 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_name(ahc), devinfo->channel, devinfo->target, target 4324 drivers/scsi/aic7xxx/aic7xxx_core.c found = ahc_abort_scbs(ahc, devinfo->target, devinfo->channel, target 4359 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_send_async(ahc, devinfo->channel, devinfo->target, target 4365 drivers/scsi/aic7xxx/aic7xxx_core.c message, devinfo->channel, devinfo->target, found); target 5722 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_match_scb(struct ahc_softc *ahc, struct scb *scb, int target, target 5732 drivers/scsi/aic7xxx/aic7xxx_core.c match = ((targ == target) || (target == CAM_TARGET_WILDCARD)); target 5760 drivers/scsi/aic7xxx/aic7xxx_core.c int target; target 5764 drivers/scsi/aic7xxx/aic7xxx_core.c target = SCB_GET_TARGET(ahc, scb); target 5768 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_search_qinfifo(ahc, target, channel, lun, target 5829 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_search_qinfifo(struct ahc_softc *ahc, int target, char channel, target 5878 drivers/scsi/aic7xxx/aic7xxx_core.c if (ahc_match_scb(ahc, scb, target, channel, lun, tag, role)) { target 5989 drivers/scsi/aic7xxx/aic7xxx_core.c if (ahc_match_scb(ahc, scb, target, channel, target 6029 drivers/scsi/aic7xxx/aic7xxx_core.c found += ahc_search_untagged_queues(ahc, /*ahc_io_ctx_t*/NULL, target, target 6039 drivers/scsi/aic7xxx/aic7xxx_core.c int target, char channel, int lun, uint32_t status, target 6060 drivers/scsi/aic7xxx/aic7xxx_core.c if (target != CAM_TARGET_WILDCARD) { target 6062 drivers/scsi/aic7xxx/aic7xxx_core.c i = target; target 6094 drivers/scsi/aic7xxx/aic7xxx_core.c if (ahc_match_scb(ahc, scb, target, channel, lun, target 6136 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_search_disc_list(struct ahc_softc *ahc, int target, char channel, target 6176 drivers/scsi/aic7xxx/aic7xxx_core.c if (ahc_match_scb(ahc, scbp, target, channel, lun, target 6299 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_abort_scbs(struct ahc_softc *ahc, int target, char channel, target 6321 drivers/scsi/aic7xxx/aic7xxx_core.c found = ahc_search_qinfifo(ahc, target, channel, lun, SCB_LIST_NULL, target 6329 drivers/scsi/aic7xxx/aic7xxx_core.c if (target != CAM_TARGET_WILDCARD) { target 6330 drivers/scsi/aic7xxx/aic7xxx_core.c i = target; target 6363 drivers/scsi/aic7xxx/aic7xxx_core.c || ahc_match_scb(ahc, scbp, target, channel, target 6376 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_search_disc_list(ahc, target, channel, lun, tag, target 6396 drivers/scsi/aic7xxx/aic7xxx_core.c && ahc_match_scb(ahc, scbp, target, channel, lun, tag, role))) target 6410 drivers/scsi/aic7xxx/aic7xxx_core.c if (ahc_match_scb(ahc, scbp, target, channel, lun, tag, role)) { target 6425 
drivers/scsi/aic7xxx/aic7xxx_core.c ahc_platform_abort_scbs(ahc, target, channel, lun, tag, role, status); target 6453 drivers/scsi/aic7xxx/aic7xxx_core.c u_int initiator, target, max_scsiid; target 6562 drivers/scsi/aic7xxx/aic7xxx_core.c for (target = 0; target <= max_scsiid; target++) { target 6566 drivers/scsi/aic7xxx/aic7xxx_core.c tstate = ahc->enabled_targets[target]; target 6589 drivers/scsi/aic7xxx/aic7xxx_core.c for (target = 0; target <= max_scsiid; target++) { target 6591 drivers/scsi/aic7xxx/aic7xxx_core.c if (ahc->enabled_targets[target] == NULL) target 6596 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_compile_devinfo(&devinfo, target, initiator, target 7141 drivers/scsi/aic7xxx/aic7xxx_core.c int target; target 7296 drivers/scsi/aic7xxx/aic7xxx_core.c for (target = 0; target <= maxtarget; target++) { target 7297 drivers/scsi/aic7xxx/aic7xxx_core.c untagged_q = &ahc->untagged_queues[target]; target 7300 drivers/scsi/aic7xxx/aic7xxx_core.c printk("Untagged Q(%d): ", target); target 7367 drivers/scsi/aic7xxx/aic7xxx_core.c u_int target; target 7484 drivers/scsi/aic7xxx/aic7xxx_core.c target = ccb->ccb_h.target_id; target 7487 drivers/scsi/aic7xxx/aic7xxx_core.c target_mask = 0x01 << target; target 7517 drivers/scsi/aic7xxx/aic7xxx_core.c if (target != CAM_TARGET_WILDCARD && tstate == NULL) { target 7518 drivers/scsi/aic7xxx/aic7xxx_core.c tstate = ahc_alloc_tstate(ahc, target, channel); target 7548 drivers/scsi/aic7xxx/aic7xxx_core.c if (target != CAM_TARGET_WILDCARD) { target 7574 drivers/scsi/aic7xxx/aic7xxx_core.c if (target != our_id) { target 7586 drivers/scsi/aic7xxx/aic7xxx_core.c ahc->our_id = target; target 7588 drivers/scsi/aic7xxx/aic7xxx_core.c ahc->our_id_b = target; target 7594 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_outb(ahc, SCSIID, target); target 7663 drivers/scsi/aic7xxx/aic7xxx_core.c if (target != CAM_TARGET_WILDCARD) { target 7673 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_free_tstate(ahc, target, channel, target 7832 drivers/scsi/aic7xxx/aic7xxx_core.c int target; target 7836 drivers/scsi/aic7xxx/aic7xxx_core.c target = SCSIID_OUR_ID(cmd->scsiid); target 7840 drivers/scsi/aic7xxx/aic7xxx_core.c tstate = ahc->enabled_targets[target]; target 7864 drivers/scsi/aic7xxx/aic7xxx_core.c initiator, target, lun, target 7871 drivers/scsi/aic7xxx/aic7xxx_core.c atio->ccb_h.target_id = target; target 7927 drivers/scsi/aic7xxx/aic7xxx_core.c initiator, target, lun, ahc->pending_device); target 1350 drivers/scsi/aic7xxx/aic7xxx_osm.c ahc_platform_abort_scbs(struct ahc_softc *ahc, int target, char channel, target 1407 drivers/scsi/aic7xxx/aic7xxx_osm.c ahc_send_async(ahc, devinfo.channel, devinfo.target, target 1413 drivers/scsi/aic7xxx/aic7xxx_osm.c ahc_send_async(ahc, devinfo.channel, devinfo.target, target 1605 drivers/scsi/aic7xxx/aic7xxx_osm.c u_int target, u_int lun, ac_code code) target 1617 drivers/scsi/aic7xxx/aic7xxx_osm.c BUG_ON(target == CAM_TARGET_WILDCARD); target 1622 drivers/scsi/aic7xxx/aic7xxx_osm.c target, &tstate); target 1639 drivers/scsi/aic7xxx/aic7xxx_osm.c target_offset = target; target 1672 drivers/scsi/aic7xxx/aic7xxx_osm.c channel - 'A', target); target 681 drivers/scsi/aic7xxx/aic7xxx_osm.h int ahc_platform_abort_scbs(struct ahc_softc *ahc, int target, target 689 drivers/scsi/aic7xxx/aic7xxx_osm.h u_int target, u_int lun, ac_code); target 790 drivers/scsi/arcmsr/arcmsr_hba.c int target, lun; target 835 drivers/scsi/arcmsr/arcmsr_hba.c for (target = 0; target < ARCMSR_MAX_TARGETID - 1; target 836 drivers/scsi/arcmsr/arcmsr_hba.c target++) { target 
846 drivers/scsi/arcmsr/arcmsr_hba.c 0, target, lun); target 850 drivers/scsi/arcmsr/arcmsr_hba.c 0, target, lun); target 1532 drivers/scsi/arcmsr/arcmsr_hba.c int target, lun, i; target 1545 drivers/scsi/arcmsr/arcmsr_hba.c for (target = 0; target < ARCMSR_MAX_TARGETID; target++) { target 1551 drivers/scsi/arcmsr/arcmsr_hba.c 0, target, lun); target 3094 drivers/scsi/arcmsr/arcmsr_hba.c int target = cmd->device->id; target 3104 drivers/scsi/arcmsr/arcmsr_hba.c if (target == 16) { target 495 drivers/scsi/arm/acornscsi.c acornscsi_dumplogline(AS_Host *host, int target, int line) target 500 drivers/scsi/arm/acornscsi.c ptr = host->status_ptr[target] - STATUS_BUFFER_TO_PRINT; target 504 drivers/scsi/arm/acornscsi.c printk("%c: %3s:", target == 8 ? 'H' : '0' + target, target 507 drivers/scsi/arm/acornscsi.c prev = host->status[target][ptr].when; target 509 drivers/scsi/arm/acornscsi.c for (; ptr != host->status_ptr[target]; ptr = (ptr + 1) & (STATUS_BUFFER_SIZE - 1)) { target 512 drivers/scsi/arm/acornscsi.c if (!host->status[target][ptr].when) target 517 drivers/scsi/arm/acornscsi.c printk("%c%02X", host->status[target][ptr].irq ? '-' : ' ', target 518 drivers/scsi/arm/acornscsi.c host->status[target][ptr].ph); target 522 drivers/scsi/arm/acornscsi.c printk(" %02X", host->status[target][ptr].ssr); target 526 drivers/scsi/arm/acornscsi.c time_diff = host->status[target][ptr].when - prev; target 527 drivers/scsi/arm/acornscsi.c prev = host->status[target][ptr].when; target 542 drivers/scsi/arm/acornscsi.c void acornscsi_dumplog(AS_Host *host, int target) target 545 drivers/scsi/arm/acornscsi.c acornscsi_dumplogline(host, target, 0); target 546 drivers/scsi/arm/acornscsi.c acornscsi_dumplogline(host, target, 1); target 547 drivers/scsi/arm/acornscsi.c acornscsi_dumplogline(host, target, 2); target 549 drivers/scsi/arm/acornscsi.c if (target == 8) target 552 drivers/scsi/arm/acornscsi.c target = 8; target 1767 drivers/scsi/arm/acornscsi.c unsigned int target, lun, ok = 0; target 1769 drivers/scsi/arm/acornscsi.c target = sbic_arm_read(host, SBIC_SOURCEID); target 1771 drivers/scsi/arm/acornscsi.c if (!(target & 8)) target 1776 drivers/scsi/arm/acornscsi.c target &= 7; target 1781 drivers/scsi/arm/acornscsi.c host->host->host_no, target, host->SCpnt->device->id); target 1787 drivers/scsi/arm/acornscsi.c host->scsi.reconnected.target = target; target 1792 drivers/scsi/arm/acornscsi.c host->SCpnt->device->id == target && host->SCpnt->device->lun == lun) target 1795 drivers/scsi/arm/acornscsi.c if (!ok && queue_probetgtlun(&host->queues.disconnected, target, lun)) target 1798 drivers/scsi/arm/acornscsi.c ADD_STATUS(target, 0x81, host->scsi.phase, 0); target 1806 drivers/scsi/arm/acornscsi.c host->host->host_no, '0' + target); target 1807 drivers/scsi/arm/acornscsi.c acornscsi_dumplog(host, target); target 1829 drivers/scsi/arm/acornscsi.c if (host->SCpnt->device->id == host->scsi.reconnected.target && target 1848 drivers/scsi/arm/acornscsi.c host->scsi.reconnected.target, target 2384 drivers/scsi/arm/acornscsi.c clear_bit(host->scsi.reconnected.target * 8 + host->scsi.reconnected.lun, target 287 drivers/scsi/arm/acornscsi.h unsigned char target; /* reconnected target */ target 290 drivers/scsi/arm/fas216.c fas216_do_log(FAS216_Info *info, char target, char *fmt, va_list ap) target 295 drivers/scsi/arm/fas216.c printk("scsi%d.%c: %s", info->host->host_no, target, buf); target 314 drivers/scsi/arm/fas216.c fas216_log_target(FAS216_Info *info, int level, int target, char *fmt, ...) 
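
A pattern worth noting at this point in the index: the acornscsi.c entries above and the fas216.c reselection entries just below both recover the reselecting target ID from a byte in which the initiator's own ID bit and the target's ID bit are set, by clearing the host's bit and taking the position of the remaining set bit (fas216.c: target = msg[0] & ~(1 << info->host->this_id); target = ffs(target) - 1;). The following is a minimal, self-contained sketch of that decode only, not the drivers' actual code; the names decode_reselect_id, idbits, host_id and the exactly-one-bit sanity check are mine.

    /* decode_reselect.c -- sketch of the mask-own-ID-then-ffs reselection decode */
    #include <stdio.h>
    #include <strings.h>   /* ffs() */

    /* Return the reselecting target ID (0-7), or -1 if the ID byte is malformed. */
    static int decode_reselect_id(unsigned char idbits, int host_id)
    {
            unsigned char others = idbits & ~(1u << host_id); /* drop our own bit */

            if (others == 0 || (others & (others - 1)))       /* need exactly one bit left */
                    return -1;

            return ffs(others) - 1;                           /* bit position == SCSI ID */
    }

    int main(void)
    {
            /* host is ID 7; target 3 reselects us, so bits 7 and 3 are set: 0x88 */
            printf("reselected by target %d\n", decode_reselect_id(0x88, 7));
            return 0;
    }
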
target 321 drivers/scsi/arm/fas216.c if (target < 0) target 322 drivers/scsi/arm/fas216.c target = 'H'; target 324 drivers/scsi/arm/fas216.c target += '0'; target 327 drivers/scsi/arm/fas216.c fas216_do_log(info, target, fmt, args); target 468 drivers/scsi/arm/fas216.c static void fas216_set_sync(FAS216_Info *info, int target) target 472 drivers/scsi/arm/fas216.c fas216_writeb(info, REG_SOF, info->device[target].sof); target 473 drivers/scsi/arm/fas216.c fas216_writeb(info, REG_STP, info->device[target].stp); target 476 drivers/scsi/arm/fas216.c if (info->device[target].period >= (200 / 4)) target 942 drivers/scsi/arm/fas216.c unsigned char target, lun, tag; target 969 drivers/scsi/arm/fas216.c target = msg[0] & ~(1 << info->host->this_id); target 970 drivers/scsi/arm/fas216.c target = ffs(target) - 1; target 982 drivers/scsi/arm/fas216.c fas216_writeb(info, REG_SDID, target); target 983 drivers/scsi/arm/fas216.c fas216_set_sync(info, target); target 987 drivers/scsi/arm/fas216.c target, lun, tag); target 991 drivers/scsi/arm/fas216.c if (info->SCpnt->device->id == target && target 1003 drivers/scsi/arm/fas216.c target, lun, tag); target 1750 drivers/scsi/arm/fas216.c static int parity_test(FAS216_Info *info, int target) target 1753 drivers/scsi/arm/fas216.c if (target == 3) { target 1754 drivers/scsi/arm/fas216.c info->device[target].parity_check = 0; target 1758 drivers/scsi/arm/fas216.c return info->device[target].parity_check; target 2481 drivers/scsi/arm/fas216.c int i, res = FAILED, target = SCpnt->device->id; target 2483 drivers/scsi/arm/fas216.c fas216_log(info, LOG_ERROR, "device reset for target %d", target); target 2504 drivers/scsi/arm/fas216.c queue_remove_all_target(&info->queues.issue, target); target 2505 drivers/scsi/arm/fas216.c queue_remove_all_target(&info->queues.disconnected, target); target 2506 drivers/scsi/arm/fas216.c if (info->origSCpnt && info->origSCpnt->device->id == target) target 2508 drivers/scsi/arm/fas216.c if (info->reqSCpnt && info->reqSCpnt->device->id == target) target 2511 drivers/scsi/arm/fas216.c clear_bit(target * 8 + i, info->busyluns); target 75 drivers/scsi/arm/fas216.h #define BUSID(target) ((target) & 7) target 206 drivers/scsi/arm/queue.c struct scsi_cmnd *queue_remove_tgtluntag(Queue_t *queue, int target, int lun, target 216 drivers/scsi/arm/queue.c if (q->SCpnt->device->id == target && q->SCpnt->device->lun == lun && target 234 drivers/scsi/arm/queue.c void queue_remove_all_target(Queue_t *queue, int target) target 242 drivers/scsi/arm/queue.c if (q->SCpnt->device->id == target) target 257 drivers/scsi/arm/queue.c int queue_probetgtlun (Queue_t *queue, int target, int lun) target 266 drivers/scsi/arm/queue.c if (q->SCpnt->device->id == target && q->SCpnt->device->lun == lun) { target 72 drivers/scsi/arm/queue.h extern struct scsi_cmnd *queue_remove_tgtluntag(Queue_t *queue, int target, target 82 drivers/scsi/arm/queue.h extern void queue_remove_all_target(Queue_t *queue, int target); target 93 drivers/scsi/arm/queue.h extern int queue_probetgtlun (Queue_t *queue, int target, int lun); target 1017 drivers/scsi/bfa/bfa_defs_svc.h u32 target; target 494 drivers/scsi/bfa/bfa_fc.h u32 target:1; target 509 drivers/scsi/bfa/bfa_fc.h u32 target:1; target 418 drivers/scsi/bfa/bfa_fcbuild.c if (prli->parampage.servparams.target != 1) target 2172 drivers/scsi/bfa/bfa_fcs_rport.c if (prli->parampage.servparams.target) { target 669 drivers/scsi/bfa/bfad_bsg.c iocmd->target = drv_itnim->scsi_tgt_id; target 351 drivers/scsi/bfa/bfad_bsg.h u32 target; 
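
The fas216.c entries above also show per-target, per-LUN bookkeeping kept in a flat bitmap indexed as target * 8 + lun (clear_bit(target * 8 + i, info->busyluns)), the same ID/LUN pairing the arm/queue.c helpers (queue_remove_tgtluntag, queue_probetgtlun) key on. Below is a small standalone sketch of that indexing scheme only; the names busyluns, set_busy, clear_busy, is_busy and reset_target are hypothetical and not taken from the driver. It merely illustrates how an 8-target by 8-LUN busy map packs into 64 bits.

    /* busymap.c -- sketch of a target*8+lun busy bitmap (8 targets x 8 LUNs) */
    #include <stdio.h>
    #include <stdint.h>
    #include <stdbool.h>

    static uint64_t busyluns;                       /* bit (target*8 + lun) set == busy */

    static void set_busy(int target, int lun)   { busyluns |=  (1ULL << (target * 8 + lun)); }
    static void clear_busy(int target, int lun) { busyluns &= ~(1ULL << (target * 8 + lun)); }
    static bool is_busy(int target, int lun)    { return busyluns & (1ULL << (target * 8 + lun)); }

    /* A device reset clears every LUN of one target, mirroring the loop seen in the index. */
    static void reset_target(int target)
    {
            for (int lun = 0; lun < 8; lun++)
                    clear_busy(target, lun);
    }

    int main(void)
    {
            set_busy(3, 0);
            set_busy(3, 5);
            reset_target(3);
            printf("target 3 lun 5 busy after reset: %d\n", is_busy(3, 5));
            return 0;
    }
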
target 209 drivers/scsi/dc395x.c struct NVRamTarget target[DC395x_MAX_SCSI_ID]; target 683 drivers/scsi/dc395x.c eeprom->target[id].cfg0 = target 687 drivers/scsi/dc395x.c eeprom->target[id].period = target 1181 drivers/scsi/dc395x.c dcb->dev_mode = eeprom->target[dcb->target_id].cfg0; target 1182 drivers/scsi/dc395x.c period_index = eeprom->target[dcb->target_id].period & 0x07; target 3623 drivers/scsi/dc395x.c u8 target, u8 lun) target 3626 drivers/scsi/dc395x.c u8 period_index = eeprom->target[target].period & 0x07; target 3630 drivers/scsi/dc395x.c dprintkdbg(DBG_0, "device_alloc: <%02i-%i>\n", target, lun); target 3639 drivers/scsi/dc395x.c dcb->target_id = target; target 3641 drivers/scsi/dc395x.c dcb->dev_mode = eeprom->target[target].cfg0; target 4090 drivers/scsi/dc395x.c for (d_addr = 0, d_eeprom = (u32 *)eeprom->target; target 4130 drivers/scsi/dc395x.c eeprom->target[0].period, target 4131 drivers/scsi/dc395x.c clock_speed[eeprom->target[0].period] / 10, target 4132 drivers/scsi/dc395x.c clock_speed[eeprom->target[0].period] % 10, target 4133 drivers/scsi/dc395x.c eeprom->target[0].cfg0); target 703 drivers/scsi/esas2r/esas2r_int.c u16 target, u32 length) target 705 drivers/scsi/esas2r/esas2r_int.c struct esas2r_target *t = a->targetdb + target; target 774 drivers/scsi/esas2r/esas2r_int.c u16 target; target 823 drivers/scsi/esas2r/esas2r_int.c target = ae->lu.id.tgtlun.wtarget_id; target 825 drivers/scsi/esas2r/esas2r_int.c if (target < ESAS2R_MAX_TARGETS) target 826 drivers/scsi/esas2r/esas2r_int.c esas2r_lun_event(a, ae, target, length); target 240 drivers/scsi/esp_scsi.c esp->target[i].esp_config3 = val; target 301 drivers/scsi/esp_scsi.c esp->prev_cfg3 = esp->target[0].esp_config3; target 314 drivers/scsi/esp_scsi.c u8 cfg3 = esp->target[0].esp_config3; target 321 drivers/scsi/esp_scsi.c u32 cfg3 = esp->target[0].esp_config3; target 326 drivers/scsi/esp_scsi.c esp->prev_cfg3 = esp->target[0].esp_config3; target 342 drivers/scsi/esp_scsi.c (esp->target[0].esp_config3 | target 344 drivers/scsi/esp_scsi.c esp->prev_cfg3 = esp->target[0].esp_config3; target 495 drivers/scsi/esp_scsi.c u8 val = esp->target[tgt].esp_config3; target 506 drivers/scsi/esp_scsi.c u8 off = esp->target[tgt].esp_offset; target 507 drivers/scsi/esp_scsi.c u8 per = esp->target[tgt].esp_period; target 551 drivers/scsi/esp_scsi.c struct scsi_target *target = tp->starget; target 553 drivers/scsi/esp_scsi.c return spi_width(target) != tp->nego_goal_width; target 558 drivers/scsi/esp_scsi.c struct scsi_target *target = tp->starget; target 561 drivers/scsi/esp_scsi.c if (!spi_offset(target) && !tp->nego_goal_offset) target 564 drivers/scsi/esp_scsi.c if (spi_offset(target) == tp->nego_goal_offset && target 565 drivers/scsi/esp_scsi.c spi_period(target) == tp->nego_goal_period) target 760 drivers/scsi/esp_scsi.c tp = &esp->target[tgt]; target 1148 drivers/scsi/esp_scsi.c int target, lun; target 1155 drivers/scsi/esp_scsi.c target = esp->fifo[0]; target 1172 drivers/scsi/esp_scsi.c target = ffs(bits) - 1; target 1189 drivers/scsi/esp_scsi.c esp_write_tgt_sync(esp, target); target 1190 drivers/scsi/esp_scsi.c esp_write_tgt_config3(esp, target); target 1195 drivers/scsi/esp_scsi.c esp_write8(target | ESP_BUSID_RESELID | ESP_BUSID_CTR32BIT, target 1198 drivers/scsi/esp_scsi.c tp = &esp->target[target]; target 1203 drivers/scsi/esp_scsi.c target, lun); target 1251 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[cmd->device->id]; target 1286 drivers/scsi/esp_scsi.c esp->target[dev->id].flags |= 
ESP_TGT_CHECK_NEGO; target 1462 drivers/scsi/esp_scsi.c tp = &esp->target[tgt]; target 1601 drivers/scsi/esp_scsi.c tp = &esp->target[tgt]; target 2084 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[i]; target 2277 drivers/scsi/esp_scsi.c esp->target[i].flags = 0; target 2278 drivers/scsi/esp_scsi.c esp->target[i].nego_goal_period = 0; target 2279 drivers/scsi/esp_scsi.c esp->target[i].nego_goal_offset = 0; target 2280 drivers/scsi/esp_scsi.c esp->target[i].nego_goal_width = 0; target 2281 drivers/scsi/esp_scsi.c esp->target[i].nego_goal_tags = 0; target 2436 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[starget->id]; target 2446 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[starget->id]; target 2454 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[dev->id]; target 2476 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[dev->id]; target 2704 drivers/scsi/esp_scsi.c static void esp_set_offset(struct scsi_target *target, int offset) target 2706 drivers/scsi/esp_scsi.c struct Scsi_Host *host = dev_to_shost(target->dev.parent); target 2708 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[target->id]; target 2717 drivers/scsi/esp_scsi.c static void esp_set_period(struct scsi_target *target, int period) target 2719 drivers/scsi/esp_scsi.c struct Scsi_Host *host = dev_to_shost(target->dev.parent); target 2721 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[target->id]; target 2727 drivers/scsi/esp_scsi.c static void esp_set_width(struct scsi_target *target, int width) target 2729 drivers/scsi/esp_scsi.c struct Scsi_Host *host = dev_to_shost(target->dev.parent); target 2731 drivers/scsi/esp_scsi.c struct esp_target_data *tp = &esp->target[target->id]; target 451 drivers/scsi/esp_scsi.h struct esp_target_data target[ESP_MAX_TARGET]; target 177 drivers/scsi/fdomain.c static int fdomain_select(struct Scsi_Host *sh, int target) target 184 drivers/scsi/fdomain.c outb(BIT(sh->this_id) | BIT(target), fd->base + REG_SCSI_DATA_NOACK); target 768 drivers/scsi/fnic/fcpio.h u8 target; target 1125 drivers/scsi/gdth.c cmd_ptr->u.raw64.target = (u8)p3; target 1130 drivers/scsi/gdth.c cmd_ptr->u.raw.target = (u8)p3; target 2196 drivers/scsi/gdth.c cmdp->u.raw64.target = t; target 2212 drivers/scsi/gdth.c cmdp->u.raw.target = t; target 3593 drivers/scsi/gdth.c gen->command.u.raw64.target = gen->command.u.raw.target; target 3721 drivers/scsi/gdth.c rsc->hdr_list[i].target = i; target 3801 drivers/scsi/gdth.c rsc->hdr_list[i].target = i; target 91 drivers/scsi/gdth_ioctl.h u8 target; /* target ID */ target 110 drivers/scsi/gdth_ioctl.h u8 target; /* target ID */ target 238 drivers/scsi/gdth_ioctl.h u8 target; /* target ID */ target 834 drivers/scsi/hpsa.c hdev->bus, hdev->target, hdev->lun, target 1186 drivers/scsi/hpsa.c unsigned char scsi3addr[], int bus, int *target, int *lun) target 1197 drivers/scsi/hpsa.c if (h->dev[i]->bus == bus && h->dev[i]->target != -1) target 1198 drivers/scsi/hpsa.c __set_bit(h->dev[i]->target, lun_taken); target 1204 drivers/scsi/hpsa.c *target = i; target 1255 drivers/scsi/hpsa.c h->scsi_host->host_no, dev->bus, dev->target, dev->lun, target 1295 drivers/scsi/hpsa.c device->bus, &device->target, &device->lun) != 0) target 1317 drivers/scsi/hpsa.c device->target = sd->target; target 1412 drivers/scsi/hpsa.c if (new_entry->target == -1) { target 1413 drivers/scsi/hpsa.c new_entry->target = h->dev[entry]->target; target 1607 drivers/scsi/hpsa.c sd->bus, 
sd->target, sd->lun); target 1615 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1621 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1627 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1633 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1639 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1645 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1651 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1657 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1663 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1669 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1675 drivers/scsi/hpsa.c sd->bus, sd->target, sd->lun); target 1740 drivers/scsi/hpsa.c logical_drive->target, logical_drive->lun); target 1807 drivers/scsi/hpsa.c device->target, device->lun); target 1865 drivers/scsi/hpsa.c device->bus, device->target, device->lun, cmds); target 1885 drivers/scsi/hpsa.c device->target, device->lun); target 2081 drivers/scsi/hpsa.c int bus, int target, int lun) target 2088 drivers/scsi/hpsa.c if (sd->bus == bus && sd->target == target && sd->lun == lun) target 2110 drivers/scsi/hpsa.c sd->target = sdev_id(sdev); target 3446 drivers/scsi/hpsa.c if (encl_dev->target == -1 || encl_dev->lun == -1) { target 3798 drivers/scsi/hpsa.c int bus, int target, int lun) target 3801 drivers/scsi/hpsa.c device->target = target; target 3961 drivers/scsi/hpsa.c this_device->bus, this_device->target, target 3985 drivers/scsi/hpsa.c this_device->target, this_device->lun); target 63 drivers/scsi/hpsa.h int bus, target, lun; /* as presented to the OS */ target 1050 drivers/scsi/hptiop.c req->target = scp->device->id; target 208 drivers/scsi/hptiop.h u8 target; target 226 drivers/scsi/hptiop.h u8 target; target 1278 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c iue = srp_iu_get(&vscsi->target); target 1520 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c iue->target); target 1639 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c iue->target); target 3208 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c struct srp_target *target = iue->target; target 3209 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c struct scsi_info *vscsi = target->ldata; target 3486 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c rc = srp_target_alloc(&vscsi->target, &vdev->dev, vscsi->request_limit, target 3491 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c vscsi->target.ldata = vscsi; target 3591 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c srp_target_free(&vscsi->target); target 3624 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c srp_target_free(&vscsi->target); target 296 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.h struct srp_target target; target 104 drivers/scsi/ibmvscsi_tgt/libsrp.c int srp_target_alloc(struct srp_target *target, struct device *dev, target 109 drivers/scsi/ibmvscsi_tgt/libsrp.c spin_lock_init(&target->lock); target 111 drivers/scsi/ibmvscsi_tgt/libsrp.c target->dev = dev; target 113 drivers/scsi/ibmvscsi_tgt/libsrp.c target->srp_iu_size = iu_size; target 114 drivers/scsi/ibmvscsi_tgt/libsrp.c target->rx_ring_size = nr; target 115 drivers/scsi/ibmvscsi_tgt/libsrp.c target->rx_ring = srp_ring_alloc(target->dev, nr, iu_size); target 116 drivers/scsi/ibmvscsi_tgt/libsrp.c if (!target->rx_ring) target 118 drivers/scsi/ibmvscsi_tgt/libsrp.c err = srp_iu_pool_alloc(&target->iu_queue, nr, target->rx_ring); target 122 drivers/scsi/ibmvscsi_tgt/libsrp.c dev_set_drvdata(target->dev, target); target 126 drivers/scsi/ibmvscsi_tgt/libsrp.c srp_ring_free(target->dev, target->rx_ring, nr, iu_size); target 130 
drivers/scsi/ibmvscsi_tgt/libsrp.c void srp_target_free(struct srp_target *target) target 132 drivers/scsi/ibmvscsi_tgt/libsrp.c dev_set_drvdata(target->dev, NULL); target 133 drivers/scsi/ibmvscsi_tgt/libsrp.c srp_ring_free(target->dev, target->rx_ring, target->rx_ring_size, target 134 drivers/scsi/ibmvscsi_tgt/libsrp.c target->srp_iu_size); target 135 drivers/scsi/ibmvscsi_tgt/libsrp.c srp_iu_pool_free(&target->iu_queue); target 138 drivers/scsi/ibmvscsi_tgt/libsrp.c struct iu_entry *srp_iu_get(struct srp_target *target) target 142 drivers/scsi/ibmvscsi_tgt/libsrp.c if (kfifo_out_locked(&target->iu_queue.queue, (void *)&iue, target 144 drivers/scsi/ibmvscsi_tgt/libsrp.c &target->iu_queue.lock) != sizeof(void *)) { target 150 drivers/scsi/ibmvscsi_tgt/libsrp.c iue->target = target; target 157 drivers/scsi/ibmvscsi_tgt/libsrp.c kfifo_in_locked(&iue->target->iu_queue.queue, (void *)&iue, target 158 drivers/scsi/ibmvscsi_tgt/libsrp.c sizeof(void *), &iue->target->iu_queue.lock); target 172 drivers/scsi/ibmvscsi_tgt/libsrp.c nsg = dma_map_sg(iue->target->dev, sg, cmd->se_cmd.t_data_nents, target 187 drivers/scsi/ibmvscsi_tgt/libsrp.c dma_unmap_sg(iue->target->dev, sg, nsg, DMA_BIDIRECTIONAL); target 218 drivers/scsi/ibmvscsi_tgt/libsrp.c md = dma_alloc_coherent(iue->target->dev, target 243 drivers/scsi/ibmvscsi_tgt/libsrp.c nsg = dma_map_sg(iue->target->dev, sg, cmd->se_cmd.t_data_nents, target 259 drivers/scsi/ibmvscsi_tgt/libsrp.c dma_unmap_sg(iue->target->dev, sg, nsg, DMA_BIDIRECTIONAL); target 263 drivers/scsi/ibmvscsi_tgt/libsrp.c dma_free_coherent(iue->target->dev, target 98 drivers/scsi/ibmvscsi_tgt/libsrp.h struct srp_target *target; target 541 drivers/scsi/imm.c static int imm_select(imm_struct *dev, int target) target 564 drivers/scsi/imm.c w_dtr(ppb, 0x80 | (1 << target)); target 748 drivers/scsi/initio.c if ((host->act_tags[first->target] == 0) && target 749 drivers/scsi/initio.c !(host->targets[first->target].flags & TCF_BUSY)) target 752 drivers/scsi/initio.c if ((host->act_tags[first->target] >= target 753 drivers/scsi/initio.c host->max_tags[first->target]) | target 754 drivers/scsi/initio.c (host->targets[first->target].flags & TCF_BUSY)) { target 799 drivers/scsi/initio.c host->act_tags[scbp->target]++; target 801 drivers/scsi/initio.c host->targets[scbp->target].flags |= TCF_BUSY; target 824 drivers/scsi/initio.c host->act_tags[tmp->target]--; target 826 drivers/scsi/initio.c host->targets[tmp->target].flags &= ~TCF_BUSY; target 856 drivers/scsi/initio.c host->act_tags[tmp->target]--; target 858 drivers/scsi/initio.c host->targets[tmp->target].flags &= ~TCF_BUSY; target 875 drivers/scsi/initio.c scbp_tarlun = (tmp->lun << 8) | (tmp->target); target 975 drivers/scsi/initio.c host->act_tags[tmp->target]--; target 1081 drivers/scsi/initio.c host->max_tags[scb->target] = target 1082 drivers/scsi/initio.c host->act_tags[scb->target] - 1; target 1183 drivers/scsi/initio.c outb((host->scsi_id << 4) | (scb->target & 0x0F), target 1186 drivers/scsi/initio.c active_tc = &host->targets[scb->target]; target 1948 drivers/scsi/initio.c if (scb->target != tar || scb->lun != lun) { target 2310 drivers/scsi/initio.c host->active_tc = &host->targets[scb->target]; target 2325 drivers/scsi/initio.c host->active_tc = &host->targets[scb->target]; target 2342 drivers/scsi/initio.c host->active_tc = &host->targets[scb->target]; target 2368 drivers/scsi/initio.c tar = scb->target; /* target */ target 2375 drivers/scsi/initio.c if (tmp->target == tar) { target 2545 drivers/scsi/initio.c 
cblk->target = cmnd->device->id; target 369 drivers/scsi/initio.h u8 target; /*26 Target Id */ target 1214 drivers/scsi/ipr.c res->target = gscsi_res->target; target 1219 drivers/scsi/ipr.c res->target = find_first_zero_bit(ioa_cfg->target_ids, target 1221 drivers/scsi/ipr.c set_bit(res->target, ioa_cfg->target_ids); target 1225 drivers/scsi/ipr.c res->target = 0; target 1228 drivers/scsi/ipr.c res->target = find_first_zero_bit(ioa_cfg->array_ids, target 1230 drivers/scsi/ipr.c set_bit(res->target, ioa_cfg->array_ids); target 1233 drivers/scsi/ipr.c res->target = find_first_zero_bit(ioa_cfg->vset_ids, target 1235 drivers/scsi/ipr.c set_bit(res->target, ioa_cfg->vset_ids); target 1237 drivers/scsi/ipr.c res->target = find_first_zero_bit(ioa_cfg->target_ids, target 1239 drivers/scsi/ipr.c set_bit(res->target, ioa_cfg->target_ids); target 1251 drivers/scsi/ipr.c res->target = cfgtew->u.cfgte->res_addr.target; target 1279 drivers/scsi/ipr.c res->target == cfgtew->u.cfgte->res_addr.target && target 1408 drivers/scsi/ipr.c clear_bit(res->target, ioa_cfg->array_ids); target 1410 drivers/scsi/ipr.c clear_bit(res->target, ioa_cfg->vset_ids); target 1415 drivers/scsi/ipr.c clear_bit(res->target, ioa_cfg->target_ids); target 1418 drivers/scsi/ipr.c clear_bit(res->target, ioa_cfg->target_ids); target 1842 drivers/scsi/ipr.c error->last_func_vset_res_addr.target, target 1893 drivers/scsi/ipr.c error->last_func_vset_res_addr.target, target 3331 drivers/scsi/ipr.c u8 bus, target, lun; target 3367 drivers/scsi/ipr.c target = res->target; target 3371 drivers/scsi/ipr.c scsi_add_device(ioa_cfg->host, bus, target, lun); target 4565 drivers/scsi/ipr.c res->bus, res->target, res->lun); target 4780 drivers/scsi/ipr.c (res->target == starget->id)) { target 4885 drivers/scsi/ipr.c (res->target == sdev->id) && target 6881 drivers/scsi/ipr.c scsi_report_device_reset(ioa_cfg->host, res->bus, res->target); target 332 drivers/scsi/ipr.h u8 target; target 335 drivers/scsi/ipr.h (((res_addr).bus << 16) | ((res_addr).target << 8) | (res_addr).lun) target 1298 drivers/scsi/ipr.h u32 target; /* AKA id */ target 1306 drivers/scsi/ipr.h (((res)->bus << 24) | ((res)->target << 8) | (res)->lun) target 1782 drivers/scsi/ipr.h #define ipr_res_printk(level, ioa_cfg, bus, target, lun, fmt, ...) \ target 1784 drivers/scsi/ipr.h bus, target, lun, ##__VA_ARGS__) target 1787 drivers/scsi/ipr.h ipr_res_printk(KERN_ERR, ioa_cfg, (res)->bus, (res)->target, (res)->lun, fmt, ##__VA_ARGS__) target 1791 drivers/scsi/ipr.h (ra).bus, (ra).target, (ra).lun, ##__VA_ARGS__) target 1803 drivers/scsi/ipr.h (res).bus, (res).target, (res).lun); \ target 1953 drivers/scsi/ipr.h (res_addr->target < (IPR_MAX_NUM_TARGETS_PER_BUS - 1))) target 83 drivers/scsi/iscsi_boot_sysfs.c iscsi_boot_rd_attr(tgt_name, target-name, ISCSI_BOOT_TGT_NAME); target 6908 drivers/scsi/lpfc/lpfc_attr.c (rdata->target) ? 
cast rdata->target->field : 0); \ target 528 drivers/scsi/megaraid.c int target = 0; target 552 drivers/scsi/megaraid.c target = cmd->device->id; target 559 drivers/scsi/megaraid.c if( target == 0 ) { target 560 drivers/scsi/megaraid.c target = adapter->boot_pdrv_tgt; target 562 drivers/scsi/megaraid.c else if( target == adapter->boot_pdrv_tgt ) { target 563 drivers/scsi/megaraid.c target = 0; target 575 drivers/scsi/megaraid.c target = cmd->device->id; target 909 drivers/scsi/megaraid.c channel, target); target 919 drivers/scsi/megaraid.c channel, target); target 950 drivers/scsi/megaraid.c int channel, int target) target 966 drivers/scsi/megaraid.c pthru->target = (adapter->flag & BOARD_40LD) ? target 967 drivers/scsi/megaraid.c (channel << 4) | target : target; target 1015 drivers/scsi/megaraid.c int channel, int target) target 1030 drivers/scsi/megaraid.c epthru->target = (adapter->flag & BOARD_40LD) ? target 1031 drivers/scsi/megaraid.c (channel << 4) | target : target; target 4035 drivers/scsi/megaraid.c pthru->target = (adapter->flag & BOARD_40LD) ? (ch << 4)|tgt : tgt; target 133 drivers/scsi/megaraid.h u8 target; /* if islogical == 0 */ target 159 drivers/scsi/megaraid.h u8 target; /* if islogical == 0 */ target 397 drivers/scsi/megaraid.h u8 target; target 233 drivers/scsi/megaraid/mbox_defs.h uint8_t target; target 287 drivers/scsi/megaraid/mbox_defs.h uint8_t target; target 590 drivers/scsi/megaraid/mbox_defs.h uint8_t target; target 227 drivers/scsi/megaraid/mega_common.h #define MRAID_GET_DEVICE_MAP(adp, scp, p_chan, target, islogical) \ target 238 drivers/scsi/megaraid/mega_common.h target = \ target 244 drivers/scsi/megaraid/mega_common.h target = ((adp)->device_ids[SCP2CHANNEL(scp)] \ target 1484 drivers/scsi/megaraid/megaraid_mbox.c int target; target 1500 drivers/scsi/megaraid/megaraid_mbox.c MRAID_GET_DEVICE_MAP(adapter, scp, channel, target, islogical); target 1525 drivers/scsi/megaraid/megaraid_mbox.c scb->dev_target = target; target 1534 drivers/scsi/megaraid/megaraid_mbox.c ccb->raw_mbox[3] = target; target 1595 drivers/scsi/megaraid/megaraid_mbox.c if ((target % 0x80) >= MAX_LOGICAL_DRIVES_40LD) { target 1610 drivers/scsi/megaraid/megaraid_mbox.c scb->dev_target = target; target 1619 drivers/scsi/megaraid/megaraid_mbox.c pthru->logdrv = target; target 1654 drivers/scsi/megaraid/megaraid_mbox.c scb->dev_target = target; target 1657 drivers/scsi/megaraid/megaraid_mbox.c mbox->logdrv = target; target 1751 drivers/scsi/megaraid/megaraid_mbox.c scb->dev_target = target; target 1756 drivers/scsi/megaraid/megaraid_mbox.c ccb->raw_mbox[3] = target; target 1769 drivers/scsi/megaraid/megaraid_mbox.c if (target > 15 || SCP2LUN(scp) > 7) { target 1777 drivers/scsi/megaraid/megaraid_mbox.c if (rdev->fast_load && (target == 15) && target 1819 drivers/scsi/megaraid/megaraid_mbox.c scb->dev_target = target; target 1941 drivers/scsi/megaraid/megaraid_mbox.c uint8_t target; target 1946 drivers/scsi/megaraid/megaraid_mbox.c target = scb->dev_target; target 1953 drivers/scsi/megaraid/megaraid_mbox.c pthru->target = (channel << 4) | target; target 1990 drivers/scsi/megaraid/megaraid_mbox.c uint8_t target; target 1995 drivers/scsi/megaraid/megaraid_mbox.c target = scb->dev_target; target 2002 drivers/scsi/megaraid/megaraid_mbox.c epthru->target = (channel << 4) | target; target 249 drivers/scsi/myrb.c mbox->type3D.target = sdev->id; target 323 drivers/scsi/myrb.c ev_buf->channel, ev_buf->target, target 328 drivers/scsi/myrb.c ev_buf->channel, ev_buf->target, target 792 
drivers/scsi/myrb.c mbox->type3D.target = sdev->id; target 1294 drivers/scsi/myrb.c dcdb->target = sdev->id; target 1978 drivers/scsi/myrb.c mbox->type3D.target = sdev->id; target 201 drivers/scsi/myrb.h unsigned char target; target 379 drivers/scsi/myrb.h unsigned char target:5; /* Byte 2 Bits 0-4 */ target 531 drivers/scsi/myrb.h unsigned target:4; /* Byte 0 Bits 0-3 */ target 609 drivers/scsi/myrb.h unsigned char target; /* Byte 3 */ target 308 drivers/scsi/myrs.c unsigned char channel, unsigned char target, unsigned char lun, target 331 drivers/scsi/myrs.c mbox->pdev_info.pdev.target = target; target 339 drivers/scsi/myrs.c channel, target, lun); target 377 drivers/scsi/myrs.c unsigned char channel, unsigned char target, unsigned char lun, target 401 drivers/scsi/myrs.c mbox->pdev_info.pdev.target = target; target 855 drivers/scsi/myrs.c ev->target, 0); target 902 drivers/scsi/myrs.c ev->ev_seq, ev->channel, ev->target, ev_msg); target 905 drivers/scsi/myrs.c ev->channel, ev->target, target 909 drivers/scsi/myrs.c ev->channel, ev->target, target 920 drivers/scsi/myrs.c ev->ev_seq, ev->target, msg_buf); target 1645 drivers/scsi/myrs.c mbox->SCSI_10.pdev.target = ldev_info->target; target 1651 drivers/scsi/myrs.c mbox->SCSI_10.pdev.target = sdev->id; target 1691 drivers/scsi/myrs.c mbox->SCSI_255.pdev.target = ldev_info->target; target 1697 drivers/scsi/myrs.c mbox->SCSI_255.pdev.target = sdev->id; target 1823 drivers/scsi/myrs.c ldev_info->channel, ldev_info->target, target 408 drivers/scsi/myrs.h unsigned char target; /* Byte 2 */ target 490 drivers/scsi/myrs.h unsigned char target; /* Byte 2 */ target 579 drivers/scsi/myrs.h unsigned char target; /* Byte 14 */ target 618 drivers/scsi/myrs.h unsigned char target; /* Byte 1 */ target 787 drivers/scsi/ncr53c8xx.c static int device_queue_depth(int unit, int target, int lun) target 805 drivers/scsi/ncr53c8xx.c if (t != target) target 806 drivers/scsi/ncr53c8xx.c t = (target == v) ? 
v : NO_TARGET; target 815 drivers/scsi/ncr53c8xx.c (t == ALL_TARGETS || t == target) && target 1126 drivers/scsi/ncr53c8xx.c u_long target; target 1566 drivers/scsi/ncr53c8xx.c u_char target; target 1642 drivers/scsi/ncr53c8xx.c struct tcb target[MAX_TARGET]; /* Target data */ target 3963 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[i]; target 3970 drivers/scsi/ncr53c8xx.c np->target[i].usrflag = UF_NODISC; target 4053 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[cp->target]; target 4113 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[sdev->id]; target 4700 drivers/scsi/ncr53c8xx.c int target, lun; target 4771 drivers/scsi/ncr53c8xx.c for (target = 0; target < MAX_TARGET ; target++) { target 4772 drivers/scsi/ncr53c8xx.c tp=&np->target[target]; target 4834 drivers/scsi/ncr53c8xx.c tp = &np->target[cmd->device->id]; target 5085 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[cp->target]; target 5317 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[i]; target 5486 drivers/scsi/ncr53c8xx.c static void ncr_set_sync_wide_status (struct ncb *np, u_char target) target 5489 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[target]; target 5504 drivers/scsi/ncr53c8xx.c if (scmd_id(cp->cmd) != target) continue; target 5525 drivers/scsi/ncr53c8xx.c u_char target = INB (nc_sdid) & 0x0f; target 5528 drivers/scsi/ncr53c8xx.c BUG_ON(target != (scmd_id(cmd) & 0xf)); target 5530 drivers/scsi/ncr53c8xx.c tp = &np->target[target]; target 5565 drivers/scsi/ncr53c8xx.c ncr_set_sync_wide_status(np, target); target 5581 drivers/scsi/ncr53c8xx.c u16 target = INB (nc_sdid) & 0x0f; target 5586 drivers/scsi/ncr53c8xx.c BUG_ON(target != (scmd_id(cmd) & 0xf)); target 5588 drivers/scsi/ncr53c8xx.c tp = &np->target[target]; target 5613 drivers/scsi/ncr53c8xx.c ncr_set_sync_wide_status(np, target); target 5626 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[tn]; target 6535 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[cmd->device->id]; target 6690 drivers/scsi/ncr53c8xx.c u_char target = INB (nc_sdid) & 0x0f; target 6691 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[target]; target 6727 drivers/scsi/ncr53c8xx.c ncr_name (np), target, num); target 6732 drivers/scsi/ncr53c8xx.c ncr_name (np), target, num); target 7139 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[tn]; target 7227 drivers/scsi/ncr53c8xx.c cp->target = tn; target 7248 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[cp->target]; target 7349 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[tn]; target 7398 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[tn]; target 7485 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[tn]; target 7576 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[tn]; target 7959 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[device->id]; target 7969 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[device->id]; target 8531 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[starget->id]; target 8547 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[starget->id]; target 8563 drivers/scsi/ncr53c8xx.c struct tcb *tp = &np->target[starget->id]; target 447 drivers/scsi/nsp32.c unsigned char target = scmd_id(SCpnt); target 515 drivers/scsi/nsp32.c param->target_id = BIT(host_id) | BIT(target); target 578 drivers/scsi/nsp32.c unsigned char target = scmd_id(SCpnt); target 625 drivers/scsi/nsp32.c nsp32_write1(base, SCSI_OUT_LATCH_TARGET_ID, BIT(host_id) | BIT(target)); target 835 drivers/scsi/nsp32.c data->cur_target = 
&(data->target[newid]); target 900 drivers/scsi/nsp32.c nsp32_target *target; target 974 drivers/scsi/nsp32.c target = &data->target[scmd_id(SCpnt)]; target 975 drivers/scsi/nsp32.c data->cur_target = target; target 977 drivers/scsi/nsp32.c if (!(target->sync_flag & (SDTR_DONE | SDTR_INITIATOR | SDTR_TARGET))) { target 981 drivers/scsi/nsp32.c nsp32_set_max_sync(data, target, &period, &offset); target 983 drivers/scsi/nsp32.c target->sync_flag |= SDTR_INITIATOR; target 985 drivers/scsi/nsp32.c nsp32_set_async(data, target); target 986 drivers/scsi/nsp32.c target->sync_flag |= SDTR_DONE; target 991 drivers/scsi/nsp32.c target->limit_entry, period, offset); target 992 drivers/scsi/nsp32.c } else if (target->sync_flag & SDTR_INITIATOR) { target 998 drivers/scsi/nsp32.c nsp32_set_async(data, target); target 999 drivers/scsi/nsp32.c target->sync_flag &= ~SDTR_INITIATOR; target 1000 drivers/scsi/nsp32.c target->sync_flag |= SDTR_DONE; target 1004 drivers/scsi/nsp32.c } else if (target->sync_flag & SDTR_TARGET) { target 1010 drivers/scsi/nsp32.c nsp32_set_async(data, target); target 1011 drivers/scsi/nsp32.c target->sync_flag &= ~SDTR_TARGET; target 1012 drivers/scsi/nsp32.c target->sync_flag |= SDTR_DONE; target 1020 drivers/scsi/nsp32.c SCpnt->device->id, target->sync_flag, target->syncreg, target 1021 drivers/scsi/nsp32.c target->ackwidth); target 1469 drivers/scsi/nsp32.c for (id = 0; id < ARRAY_SIZE(data->target); id++) { target 1478 drivers/scsi/nsp32.c if (data->target[id].sync_flag == SDTR_DONE) { target 1479 drivers/scsi/nsp32.c if (data->target[id].period == 0 && target 1480 drivers/scsi/nsp32.c data->target[id].offset == ASYNC_OFFSET ) { target 1489 drivers/scsi/nsp32.c if (data->target[id].period != 0) { target 1491 drivers/scsi/nsp32.c speed = 1000000 / (data->target[id].period * 4); target 1496 drivers/scsi/nsp32.c data->target[id].offset target 2221 drivers/scsi/nsp32.c nsp32_target *target = data->cur_target; target 2240 drivers/scsi/nsp32.c if (target->sync_flag & SDTR_INITIATOR) { target 2247 drivers/scsi/nsp32.c target->sync_flag &= ~SDTR_INITIATOR; target 2248 drivers/scsi/nsp32.c target->sync_flag |= SDTR_DONE; target 2287 drivers/scsi/nsp32.c entry = nsp32_search_period_entry(data, target, get_period); target 2300 drivers/scsi/nsp32.c nsp32_set_sync_entry(data, target, entry, get_offset); target 2305 drivers/scsi/nsp32.c target->sync_flag |= SDTR_INITIATOR; target 2318 drivers/scsi/nsp32.c entry = nsp32_search_period_entry(data, target, get_period); target 2321 drivers/scsi/nsp32.c nsp32_set_async(data, target); target 2324 drivers/scsi/nsp32.c nsp32_set_sync_entry(data, target, entry, get_offset); target 2329 drivers/scsi/nsp32.c target->period = get_period; target 2341 drivers/scsi/nsp32.c nsp32_set_async(data, target); /* set as ASYNC transfer mode */ target 2343 drivers/scsi/nsp32.c target->period = 0; target 2354 drivers/scsi/nsp32.c nsp32_target *target, target 2359 drivers/scsi/nsp32.c if (target->limit_entry >= data->syncnum) { target 2361 drivers/scsi/nsp32.c target->limit_entry = 0; target 2364 drivers/scsi/nsp32.c for (i = target->limit_entry; i < data->syncnum; i++) { target 2386 drivers/scsi/nsp32.c static void nsp32_set_async(nsp32_hw_data *data, nsp32_target *target) target 2388 drivers/scsi/nsp32.c unsigned char period = data->synct[target->limit_entry].period_num; target 2390 drivers/scsi/nsp32.c target->offset = ASYNC_OFFSET; target 2391 drivers/scsi/nsp32.c target->period = 0; target 2392 drivers/scsi/nsp32.c target->syncreg = TO_SYNCREG(period, 
ASYNC_OFFSET); target 2393 drivers/scsi/nsp32.c target->ackwidth = 0; target 2394 drivers/scsi/nsp32.c target->sample_reg = 0; target 2404 drivers/scsi/nsp32.c nsp32_target *target, target 2410 drivers/scsi/nsp32.c period_num = data->synct[target->limit_entry].period_num; target 2411 drivers/scsi/nsp32.c *period = data->synct[target->limit_entry].start_period; target 2412 drivers/scsi/nsp32.c ackwidth = data->synct[target->limit_entry].ackwidth; target 2415 drivers/scsi/nsp32.c target->syncreg = TO_SYNCREG(period_num, *offset); target 2416 drivers/scsi/nsp32.c target->ackwidth = ackwidth; target 2417 drivers/scsi/nsp32.c target->offset = *offset; target 2418 drivers/scsi/nsp32.c target->sample_reg = 0; /* disable SREQ sampling */ target 2426 drivers/scsi/nsp32.c nsp32_target *target, target 2436 drivers/scsi/nsp32.c target->syncreg = TO_SYNCREG(period, offset); target 2437 drivers/scsi/nsp32.c target->ackwidth = ackwidth; target 2438 drivers/scsi/nsp32.c target->offset = offset; target 2439 drivers/scsi/nsp32.c target->sample_reg = sample_rate | SAMPLING_ENABLE; target 2676 drivers/scsi/nsp32.c for (i = 0; i < ARRAY_SIZE(data->target); i++) { target 2677 drivers/scsi/nsp32.c nsp32_target *target = &(data->target[i]); target 2679 drivers/scsi/nsp32.c target->limit_entry = 0; target 2680 drivers/scsi/nsp32.c target->sync_flag = 0; target 2681 drivers/scsi/nsp32.c nsp32_set_async(data, target); target 2858 drivers/scsi/nsp32.c for (i = 0; i < ARRAY_SIZE(data->target); i++) { target 2859 drivers/scsi/nsp32.c nsp32_target *target = &data->target[i]; target 2861 drivers/scsi/nsp32.c target->sync_flag = 0; target 2862 drivers/scsi/nsp32.c nsp32_set_async(data, target); target 2986 drivers/scsi/nsp32.c nsp32_target *target; target 3031 drivers/scsi/nsp32.c target = &data->target[i]; target 3033 drivers/scsi/nsp32.c target->limit_entry = 0; /* set as ULTRA20M */ target 3036 drivers/scsi/nsp32.c entry = nsp32_search_period_entry(data, target, ret); target 3041 drivers/scsi/nsp32.c target->limit_entry = entry; target 3072 drivers/scsi/nsp32.c nsp32_target *target; target 3086 drivers/scsi/nsp32.c target = &data->target[i]; target 3105 drivers/scsi/nsp32.c entry = nsp32_search_period_entry(data, target, val); target 3110 drivers/scsi/nsp32.c target->limit_entry = entry; target 577 drivers/scsi/nsp32.h nsp32_target target[MAX_TARGET]; /* SCSI ID */ target 192 drivers/scsi/pcmcia/nsp_cs.c unsigned char target = scmd_id(SCpnt); target 198 drivers/scsi/pcmcia/nsp_cs.c SCpnt, target, SCpnt->device->lun, scsi_sglist(SCpnt), target 368 drivers/scsi/pcmcia/nsp_cs.c unsigned char target = scmd_id(SCpnt); target 405 drivers/scsi/pcmcia/nsp_cs.c nsp_index_write(base, SCSIDATALATCH, BIT(host_id) | BIT(target)); target 447 drivers/scsi/pcmcia/nsp_cs.c unsigned char target = scmd_id(SCpnt); target 450 drivers/scsi/pcmcia/nsp_cs.c sync_data *sync = &(data->Sync[target]); target 658 drivers/scsi/pcmcia/nsp_cs.c int target; target 664 drivers/scsi/pcmcia/nsp_cs.c target = 0; target 670 drivers/scsi/pcmcia/nsp_cs.c target++; target 673 drivers/scsi/pcmcia/nsp_cs.c if (scmd_id(SCpnt) != target) { target 674 drivers/scsi/pcmcia/nsp_cs.c nsp_msg(KERN_ERR, "XXX: reselect ID must be %d in this implementation.", target); target 916 drivers/scsi/pcmcia/nsp_cs.c unsigned char target = scmd_id(SCpnt); target 919 drivers/scsi/pcmcia/nsp_cs.c sync_data *sync = &(data->Sync[target]); target 960 drivers/scsi/pcmcia/nsp_cs.c unsigned char target, lun; target 1054 drivers/scsi/pcmcia/nsp_cs.c target = tmpSC->device->id; target 1056 
drivers/scsi/pcmcia/nsp_cs.c sync_neg = &(data->Sync[target].SyncNegotiation); target 1228 drivers/scsi/pcmcia/nsp_cs.c data->Sync[target].SyncPeriod = 0; target 1229 drivers/scsi/pcmcia/nsp_cs.c data->Sync[target].SyncOffset = 0; target 1264 drivers/scsi/pcmcia/nsp_cs.c data->Sync[target].SyncPeriod = data->MsgBuffer[3]; target 1265 drivers/scsi/pcmcia/nsp_cs.c data->Sync[target].SyncOffset = data->MsgBuffer[4]; target 1269 drivers/scsi/pcmcia/nsp_cs.c data->Sync[target].SyncPeriod = 0; target 1270 drivers/scsi/pcmcia/nsp_cs.c data->Sync[target].SyncOffset = 0; target 141 drivers/scsi/pmcraid.c u8 target, bus, lun; target 161 drivers/scsi/pmcraid.c target = temp->cfg_entry.unique_flags1; target 163 drivers/scsi/pmcraid.c target = le16_to_cpu(temp->cfg_entry.array_id) & 0xFF; target 165 drivers/scsi/pmcraid.c if (target > PMCRAID_MAX_VSET_TARGETS) target 170 drivers/scsi/pmcraid.c target = RES_TARGET(temp->cfg_entry.resource_address); target 178 drivers/scsi/pmcraid.c target == scsi_dev->id && target 4298 drivers/scsi/pmcraid.c u8 bus, target, lun; target 4353 drivers/scsi/pmcraid.c target = res->cfg_entry.unique_flags1; target 4355 drivers/scsi/pmcraid.c target = le16_to_cpu(res->cfg_entry.array_id) & 0xFF; target 4359 drivers/scsi/pmcraid.c target = target 4368 drivers/scsi/pmcraid.c scsi_add_device(pinstance->host, bus, target, lun); target 823 drivers/scsi/pmcraid.h u8 target; target 419 drivers/scsi/ppa.c static int ppa_select(ppa_struct *dev, int target) target 436 drivers/scsi/ppa.c w_dtr(ppb, (1 << target)); target 632 drivers/scsi/ppa.c printk(KERN_DEBUG "ppa: no device at SCSI ID %i\n", cmd->device->target); target 924 drivers/scsi/qedi/qedi_main.c block->target[index].target_name.byte); target 930 drivers/scsi/qedi/qedi_main.c block->target[index].ipv6_addr.byte); target 933 drivers/scsi/qedi/qedi_main.c block->target[index].ipv4_addr.byte); target 950 drivers/scsi/qedi/qedi_main.c pri_ctrl_flags = !!(block->target[0].ctrl_flags & target 959 drivers/scsi/qedi/qedi_main.c sec_ctrl_flags = !!(block->target[1].ctrl_flags & target 2128 drivers/scsi/qedi/qedi_main.c ctrl_flags = block->target[idx].ctrl_flags & target 2147 drivers/scsi/qedi/qedi_main.c mchap_name = mchap_en ? block->target[idx].chap_name.byte : NULL; target 2148 drivers/scsi/qedi/qedi_main.c mchap_secret = mchap_en ? 
block->target[idx].chap_password.byte : NULL; target 2153 drivers/scsi/qedi/qedi_main.c block->target[idx].target_name.byte); target 2158 drivers/scsi/qedi/qedi_main.c block->target[idx].ipv6_addr.byte); target 2161 drivers/scsi/qedi/qedi_main.c block->target[idx].ipv4_addr.byte); target 2165 drivers/scsi/qedi/qedi_main.c GET_FIELD2(block->target[idx].generic_cont0, target 2170 drivers/scsi/qedi/qedi_main.c block->target[idx].lun.value[1], target 2171 drivers/scsi/qedi/qedi_main.c block->target[idx].lun.value[0]); target 183 drivers/scsi/qedi/qedi_nvm_iscsi_cfg.h struct nvm_iscsi_target target[NUM_OF_ISCSI_TARGET_PER_PF]; target 774 drivers/scsi/qla1280.c qla1280_wait_for_pending_commands(struct scsi_qla_host *ha, int bus, int target) target 794 drivers/scsi/qla1280.c if (target >= 0 && SCSI_TCN_32(cmd) != target) target 823 drivers/scsi/qla1280.c int bus, target, lun; target 836 drivers/scsi/qla1280.c target = SCSI_TCN_32(cmd); target 872 drivers/scsi/qla1280.c target, lun); target 893 drivers/scsi/qla1280.c "command.\n", ha->host_no, bus, target, lun); target 894 drivers/scsi/qla1280.c if (qla1280_device_reset(ha, bus, target) == 0) { target 897 drivers/scsi/qla1280.c wait_for_target = target; target 945 drivers/scsi/qla1280.c ha->host_no, bus, target, lun); target 1115 drivers/scsi/qla1280.c qla1280_set_target_parameters(struct scsi_qla_host *ha, int bus, int target) target 1128 drivers/scsi/qla1280.c mb[1] = (uint16_t)((bus ? target | BIT_7 : target) << 8); target 1129 drivers/scsi/qla1280.c mb[2] = nv->bus[bus].target[target].parameter.renegotiate_on_error << 8; target 1130 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.stop_queue_on_check << 9; target 1131 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.auto_request_sense << 10; target 1132 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.tag_queuing << 11; target 1133 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.enable_sync << 12; target 1134 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.enable_wide << 13; target 1135 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.parity_checking << 14; target 1136 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].parameter.disconnect_allowed << 15; target 1139 drivers/scsi/qla1280.c mb[2] |= nv->bus[bus].target[target].ppr_1x160.flags.enable_ppr << 5; target 1140 drivers/scsi/qla1280.c mb[3] = (nv->bus[bus].target[target].flags.flags1x160.sync_offset << 8); target 1141 drivers/scsi/qla1280.c mb[6] = (nv->bus[bus].target[target].ppr_1x160.flags.ppr_options << 8) | target 1142 drivers/scsi/qla1280.c nv->bus[bus].target[target].ppr_1x160.flags.ppr_bus_width; target 1145 drivers/scsi/qla1280.c mb[3] = (nv->bus[bus].target[target].flags.flags1x80.sync_offset << 8); target 1147 drivers/scsi/qla1280.c mb[3] |= nv->bus[bus].target[target].sync_period; target 1154 drivers/scsi/qla1280.c mb[1] = (uint16_t)((bus ? 
target | BIT_7 : target) << 8); target 1157 drivers/scsi/qla1280.c mb[3] = nv->bus[bus].target[target].execution_throttle; target 1164 drivers/scsi/qla1280.c ha->host_no, bus, target); target 1186 drivers/scsi/qla1280.c int target = device->id; target 1198 drivers/scsi/qla1280.c (ha->bus_settings[bus].qtag_enables & (BIT_0 << target))) { target 1204 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.enable_sync = device->sdtr; target 1205 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.enable_wide = device->wdtr; target 1206 drivers/scsi/qla1280.c nv->bus[bus].target[target].ppr_1x160.flags.enable_ppr = device->ppr; target 1210 drivers/scsi/qla1280.c (~driver_setup.sync_mask & (1 << target)))) target 1211 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.enable_sync = 0; target 1214 drivers/scsi/qla1280.c (~driver_setup.wide_mask & (1 << target)))) target 1215 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.enable_wide = 0; target 1219 drivers/scsi/qla1280.c (~driver_setup.ppr_mask & (1 << target)))) target 1220 drivers/scsi/qla1280.c nv->bus[bus].target[target].ppr_1x160.flags.enable_ppr = 0; target 1224 drivers/scsi/qla1280.c if (nv->bus[bus].target[target].parameter.enable_sync) target 1225 drivers/scsi/qla1280.c status = qla1280_set_target_parameters(ha, bus, target); target 1244 drivers/scsi/qla1280.c int bus, target, lun; target 1258 drivers/scsi/qla1280.c target = SCSI_TCN_32(cmd); target 1265 drivers/scsi/qla1280.c qla1280_marker(ha, bus, target, 0, MK_SYNC_ID); target 1978 drivers/scsi/qla1280.c qla1280_set_target_defaults(struct scsi_qla_host *ha, int bus, int target) target 1982 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.renegotiate_on_error = 1; target 1983 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.auto_request_sense = 1; target 1984 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.tag_queuing = 1; target 1985 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.enable_sync = 1; target 1987 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.enable_wide = 1; target 1989 drivers/scsi/qla1280.c nv->bus[bus].target[target].execution_throttle = target 1991 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.parity_checking = 1; target 1992 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter.disconnect_allowed = 1; target 1995 drivers/scsi/qla1280.c nv->bus[bus].target[target].flags.flags1x160.device_enable = 1; target 1996 drivers/scsi/qla1280.c nv->bus[bus].target[target].flags.flags1x160.sync_offset = 0x0e; target 1997 drivers/scsi/qla1280.c nv->bus[bus].target[target].sync_period = 9; target 1998 drivers/scsi/qla1280.c nv->bus[bus].target[target].ppr_1x160.flags.enable_ppr = 1; target 1999 drivers/scsi/qla1280.c nv->bus[bus].target[target].ppr_1x160.flags.ppr_options = 2; target 2000 drivers/scsi/qla1280.c nv->bus[bus].target[target].ppr_1x160.flags.ppr_bus_width = 1; target 2002 drivers/scsi/qla1280.c nv->bus[bus].target[target].flags.flags1x80.device_enable = 1; target 2003 drivers/scsi/qla1280.c nv->bus[bus].target[target].flags.flags1x80.sync_offset = 12; target 2004 drivers/scsi/qla1280.c nv->bus[bus].target[target].sync_period = 10; target 2012 drivers/scsi/qla1280.c int bus, target; target 2057 drivers/scsi/qla1280.c for (target = 0; target < MAX_TARGETS; target++) target 2058 drivers/scsi/qla1280.c qla1280_set_target_defaults(ha, bus, target); target 2063 drivers/scsi/qla1280.c qla1280_config_target(struct scsi_qla_host *ha, int bus, int 
target) target 2072 drivers/scsi/qla1280.c mb[1] = (uint16_t)((bus ? target | BIT_7 : target) << 8); target 2083 drivers/scsi/qla1280.c mb[3] = nv->bus[bus].target[target].flags.flags1x160.sync_offset << 8; target 2085 drivers/scsi/qla1280.c mb[3] = nv->bus[bus].target[target].flags.flags1x80.sync_offset << 8; target 2086 drivers/scsi/qla1280.c mb[3] |= nv->bus[bus].target[target].sync_period; target 2090 drivers/scsi/qla1280.c flag = (BIT_0 << target); target 2091 drivers/scsi/qla1280.c if (nv->bus[bus].target[target].parameter.tag_queuing) target 2096 drivers/scsi/qla1280.c if (nv->bus[bus].target[target].flags.flags1x160.device_enable) target 2100 drivers/scsi/qla1280.c if (nv->bus[bus].target[target].flags.flags1x80.device_enable) target 2103 drivers/scsi/qla1280.c if (nv->bus[bus].target[target].flags.flags1x80.lun_disable) target 2110 drivers/scsi/qla1280.c mb[1] = (uint16_t)((bus ? target | BIT_7 : target) << 8); target 2113 drivers/scsi/qla1280.c mb[3] = nv->bus[bus].target[target].execution_throttle; target 2125 drivers/scsi/qla1280.c int target, status; target 2146 drivers/scsi/qla1280.c for (target = 0; target < MAX_TARGETS; target++) target 2147 drivers/scsi/qla1280.c status |= qla1280_config_target(ha, bus, target); target 2157 drivers/scsi/qla1280.c int bus, target, status = 0; target 2165 drivers/scsi/qla1280.c for (target = 0; target < MAX_TARGETS; target++) { target 2166 drivers/scsi/qla1280.c nv->bus[bus].target[target].parameter. target 2602 drivers/scsi/qla1280.c qla1280_device_reset(struct scsi_qla_host *ha, int bus, int target) target 2610 drivers/scsi/qla1280.c mb[1] = (bus ? (target | BIT_7) : target) << 8; target 2615 drivers/scsi/qla1280.c qla1280_marker(ha, bus, target, 0, MK_SYNC_ID); target 2639 drivers/scsi/qla1280.c unsigned int bus, target, lun; target 2645 drivers/scsi/qla1280.c target = SCSI_TCN_32(sp->cmd); target 2651 drivers/scsi/qla1280.c mb[1] = (bus ? target | BIT_7 : target) << 8 | lun; target 2712 drivers/scsi/qla1280.c pkt->target = (uint8_t) (bus ? (id | BIT_7) : id); target 2829 drivers/scsi/qla1280.c pkt->target = SCSI_BUS_32(cmd) ? target 3084 drivers/scsi/qla1280.c pkt->target = SCSI_BUS_32(cmd) ? target 3599 drivers/scsi/qla1280.c unsigned int bus, target, lun; target 3627 drivers/scsi/qla1280.c target = SCSI_TCN_32(cmd); target 3668 drivers/scsi/qla1280.c "l %i\n", bus, target, lun); target 3893 drivers/scsi/qla1280.c int bus, target, lun; target 3896 drivers/scsi/qla1280.c target = device->id; target 3901 drivers/scsi/qla1280.c mb[1] = (uint16_t) (bus ? 
target | BIT_7 : target); target 3906 drivers/scsi/qla1280.c printk(KERN_INFO "scsi(%li:%d:%d:%d):", ha->host_no, bus, target, lun); target 486 drivers/scsi/qla1280.h } target[MAX_TARGETS]; target 513 drivers/scsi/qla1280.h uint8_t target; /* SCSI ID */ target 599 drivers/scsi/qla1280.h uint8_t target; /* SCSI ID */ target 620 drivers/scsi/qla1280.h uint8_t target; /* SCSI ID */ target 640 drivers/scsi/qla1280.h uint8_t target; /* SCSI ID */ target 163 drivers/scsi/qla2xxx/qla_bsg.h uint16_t target; target 1826 drivers/scsi/qla2xxx/qla_def.h target_id_t target; /* SCSI ID */ target 1855 drivers/scsi/qla2xxx/qla_def.h target_id_t target; /* SCSI ID */ target 2104 drivers/scsi/qla2xxx/qla_def.h target_id_t target; /* SCSI ID */ target 2205 drivers/scsi/qla2xxx/qla_def.h target_id_t target; target 405 drivers/scsi/qla2xxx/qla_iocb.c SET_TARGET_ID(ha, cmd_pkt->target, sp->fcport->loop_id); target 535 drivers/scsi/qla2xxx/qla_iocb.c SET_TARGET_ID(ha, mrk->target, loop_id); target 122 drivers/scsi/qla2xxx/qla_target.h ? le16_to_cpu((iocb)->u.isp2x.target.extended) \ target 123 drivers/scsi/qla2xxx/qla_target.h : (uint16_t)(iocb)->u.isp2x.target.id.standard) target 139 drivers/scsi/qla2xxx/qla_target.h target_id_t target; target 208 drivers/scsi/qla2xxx/qla_target.h target_id_t target; target 323 drivers/scsi/qla2xxx/qla_target.h target_id_t target; target 1109 drivers/scsi/qla4xxx/ql4_fw.h uint16_t target; /* 08-09 */ target 1163 drivers/scsi/qla4xxx/ql4_fw.h uint16_t target; /* 08-09 */ target 1229 drivers/scsi/qla4xxx/ql4_fw.h uint16_t target; /* 08-09 */ target 1252 drivers/scsi/qla4xxx/ql4_fw.h uint16_t target; /* 08-09 */ target 105 drivers/scsi/qla4xxx/ql4_iocb.c marker_entry->target = cpu_to_le16(ddb_entry->fw_ddb_index); target 326 drivers/scsi/qla4xxx/ql4_iocb.c cmd_entry->target = cpu_to_le16(ddb_entry->fw_ddb_index); target 408 drivers/scsi/qla4xxx/ql4_iocb.c passthru_iocb->target = cpu_to_le16(ddb_entry->fw_ddb_index); target 376 drivers/scsi/qla4xxx/ql4_isr.c fw_ddb_index = le32_to_cpu(sts_entry->target); target 382 drivers/scsi/qla4xxx/ql4_isr.c __func__, sts_entry->target); target 249 drivers/scsi/scsi_debug.c unsigned int target; target 914 drivers/scsi/scsi_debug.c (devip->target == dp->target)) target 1424 drivers/scsi/scsi_debug.c (devip->target * 1000) + devip->lun); target 1426 drivers/scsi/scsi_debug.c (devip->target * 1000) - 3; target 1951 drivers/scsi/scsi_debug.c static int resp_err_recov_pg(unsigned char *p, int pcontrol, int target) target 1962 drivers/scsi/scsi_debug.c static int resp_disconnect_pg(unsigned char *p, int pcontrol, int target) target 1973 drivers/scsi/scsi_debug.c static int resp_format_pg(unsigned char *p, int pcontrol, int target) target 1993 drivers/scsi/scsi_debug.c static int resp_caching_pg(unsigned char *p, int pcontrol, int target) target 2013 drivers/scsi/scsi_debug.c static int resp_ctrl_m_pg(unsigned char *p, int pcontrol, int target) target 2037 drivers/scsi/scsi_debug.c static int resp_iec_m_pg(unsigned char *p, int pcontrol, int target) target 2052 drivers/scsi/scsi_debug.c static int resp_sas_sf_m_pg(unsigned char *p, int pcontrol, int target) target 2064 drivers/scsi/scsi_debug.c static int resp_sas_pcd_m_spg(unsigned char *p, int pcontrol, int target, target 2117 drivers/scsi/scsi_debug.c int target = scp->device->id; target 2141 drivers/scsi/scsi_debug.c (devip->target * 1000) - 3; target 2188 drivers/scsi/scsi_debug.c len = resp_err_recov_pg(ap, pcontrol, target); target 2192 drivers/scsi/scsi_debug.c len = resp_disconnect_pg(ap, 
pcontrol, target); target 2197 drivers/scsi/scsi_debug.c len = resp_format_pg(ap, pcontrol, target); target 2204 drivers/scsi/scsi_debug.c len = resp_caching_pg(ap, pcontrol, target); target 2210 drivers/scsi/scsi_debug.c len = resp_ctrl_m_pg(ap, pcontrol, target); target 2220 drivers/scsi/scsi_debug.c len += resp_sas_sf_m_pg(ap + len, pcontrol, target); target 2222 drivers/scsi/scsi_debug.c len += resp_sas_pcd_m_spg(ap + len, pcontrol, target, target 2229 drivers/scsi/scsi_debug.c len = resp_iec_m_pg(ap, pcontrol, target); target 2234 drivers/scsi/scsi_debug.c len = resp_err_recov_pg(ap, pcontrol, target); target 2235 drivers/scsi/scsi_debug.c len += resp_disconnect_pg(ap + len, pcontrol, target); target 2238 drivers/scsi/scsi_debug.c target); target 2240 drivers/scsi/scsi_debug.c target); target 2242 drivers/scsi/scsi_debug.c len += resp_ctrl_m_pg(ap + len, pcontrol, target); target 2243 drivers/scsi/scsi_debug.c len += resp_sas_sf_m_pg(ap + len, pcontrol, target); target 2246 drivers/scsi/scsi_debug.c target, target_dev_id); target 2249 drivers/scsi/scsi_debug.c len += resp_iec_m_pg(ap + len, pcontrol, target); target 3375 drivers/scsi/scsi_debug.c if (dp->target == sdp->id) { target 3387 drivers/scsi/scsi_debug.c if (dp->target == sdp->id) target 3838 drivers/scsi/scsi_debug.c (devip->target == sdev->id) && target 3855 drivers/scsi/scsi_debug.c open_devip->target = sdev->id; target 4076 drivers/scsi/scsi_debug.c if (devip->target == sdp->id) { target 2305 drivers/scsi/scsi_error.c void scsi_report_device_reset(struct Scsi_Host *shost, int channel, int target) target 2311 drivers/scsi/scsi_error.c target == sdev_id(sdev)) target 1497 drivers/scsi/scsi_scan.c uint target, u64 lun) target 1500 drivers/scsi/scsi_scan.c __scsi_add_device(host, channel, target, lun, NULL); target 905 drivers/scsi/smartpqi/smartpqi.h int target; target 1037 drivers/scsi/smartpqi/smartpqi_init.c int bus, int target, int lun) target 1040 drivers/scsi/smartpqi/smartpqi_init.c device->target = target; target 1049 drivers/scsi/smartpqi/smartpqi_init.c int target; target 1065 drivers/scsi/smartpqi/smartpqi_init.c target = (lunid >> 16) & 0x3fff; target 1069 drivers/scsi/smartpqi/smartpqi_init.c target = 0; target 1072 drivers/scsi/smartpqi/smartpqi_init.c pqi_set_bus_target_lun(device, bus, target, lun); target 1472 drivers/scsi/smartpqi/smartpqi_init.c device->bus, device->target, device->lun, status); target 1492 drivers/scsi/smartpqi/smartpqi_init.c device->target, device->lun); target 1514 drivers/scsi/smartpqi/smartpqi_init.c device->target, device->lun, target 1526 drivers/scsi/smartpqi/smartpqi_init.c int bus, int target, int lun) target 1532 drivers/scsi/smartpqi/smartpqi_init.c if (device->bus == bus && device->target == target && target 1604 drivers/scsi/smartpqi/smartpqi_init.c device->target, target 1659 drivers/scsi/smartpqi/smartpqi_init.c existing_device->target = new_device->target; target 1865 drivers/scsi/smartpqi/smartpqi_init.c device->bus, device->target, target 2678 drivers/scsi/smartpqi/smartpqi_init.c device->target, device->lun); target 2759 drivers/scsi/smartpqi/smartpqi_init.c ctrl_info->scsi_host->host_no, device->bus, device->target, device->lun); target 2767 drivers/scsi/smartpqi/smartpqi_init.c sshdr.ascq, ctrl_info->scsi_host->host_no, device->bus, device->target, device->lun); target 5741 drivers/scsi/smartpqi/smartpqi_init.c shost->host_no, device->bus, device->target, device->lun); target 5759 drivers/scsi/smartpqi/smartpqi_init.c shost->host_no, device->bus, device->target, 
device->lun, target 5782 drivers/scsi/smartpqi/smartpqi_init.c device->target = sdev_id(sdev); target 6341 drivers/scsi/smartpqi/smartpqi_init.c device->bus, device->target, target 233 drivers/scsi/stex.c u8 target; target 245 drivers/scsi/stex.c u8 target; target 702 drivers/scsi/stex.c req->target = id; target 482 drivers/scsi/storvsc_drv.c unsigned char target; target 1739 drivers/scsi/storvsc_drv.c int target = 0; target 1805 drivers/scsi/storvsc_drv.c host_dev->target = stor_device->target_id; target 1862 drivers/scsi/storvsc_drv.c target = (device->dev_instance.b[5] << 8 | target 1864 drivers/scsi/storvsc_drv.c ret = scsi_add_device(host, 0, target, 0); target 211 drivers/scsi/sym53c8xx_2/sym_glue.c cp->target,cp->lun, -1); target 255 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[cp->target]; target 298 drivers/scsi/sym53c8xx_2/sym_glue.c tp = &np->target[sdev->id]; target 727 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[sdev->id]; target 787 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[sdev->id]; target 823 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[sdev->id]; target 879 drivers/scsi/sym53c8xx_2/sym_glue.c u_long target; target 917 drivers/scsi/sym53c8xx_2/sym_glue.c if (!((uc->target >> t) & 1)) target 919 drivers/scsi/sym53c8xx_2/sym_glue.c tp = &np->target[t]; target 1027 drivers/scsi/sym53c8xx_2/sym_glue.c u_long target; target 1073 drivers/scsi/sym53c8xx_2/sym_glue.c uc->target = ~0; target 1075 drivers/scsi/sym53c8xx_2/sym_glue.c GET_INT_ARG(ptr, len, target); target 1076 drivers/scsi/sym53c8xx_2/sym_glue.c uc->target = (1<<target); target 1078 drivers/scsi/sym53c8xx_2/sym_glue.c printk("sym_user_command: target=%ld\n", target); target 1891 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[starget->id]; target 1901 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[starget->id]; target 1916 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[starget->id]; target 1931 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[starget->id]; target 1946 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[starget->id]; target 1959 drivers/scsi/sym53c8xx_2/sym_glue.c struct sym_tcb *tp = &np->target[starget->id]; target 68 drivers/scsi/sym53c8xx_2/sym_hipd.c static void sym_print_nego_msg(struct sym_hcb *np, int target, char *label, u_char *msg) target 70 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[target]; target 902 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[i]; target 1408 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[cp->target]; target 1454 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, cp->target, target 1645 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[cp->target]; target 1877 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[i]; target 1931 drivers/scsi/sym53c8xx_2/sym_hipd.c static void sym_settrans(struct sym_hcb *np, int target, u_char opts, u_char ofs, target 1936 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[target]; target 1938 drivers/scsi/sym53c8xx_2/sym_hipd.c assert(target == (INB(np, nc_sdid) & 0x0f)); target 2025 drivers/scsi/sym53c8xx_2/sym_hipd.c if (cp->target != target) target 2062 drivers/scsi/sym53c8xx_2/sym_hipd.c static void sym_setwide(struct sym_hcb *np, int target, u_char wide) target 2064 drivers/scsi/sym53c8xx_2/sym_hipd.c 
struct sym_tcb *tp = &np->target[target]; target 2067 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_settrans(np, target, 0, 0, 0, wide, 0, 0); target 2091 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setsync(struct sym_hcb *np, int target, target 2094 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[target]; target 2098 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_settrans(np, target, 0, ofs, per, wide, div, fak); target 2124 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setpprot(struct sym_hcb *np, int target, u_char opts, u_char ofs, target 2127 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[target]; target 2130 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_settrans(np, target, opts, ofs, per, wide, div, fak); target 2965 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_dequeue_from_squeue(struct sym_hcb *np, int i, int target, int lun, int task) target 2987 drivers/scsi/sym53c8xx_2/sym_hipd.c if ((target == -1 || cp->target == target) && target 3080 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_dequeue_from_squeue(np, i, cp->target, cp->lun, -1); target 3186 drivers/scsi/sym53c8xx_2/sym_hipd.c int sym_clear_tasks(struct sym_hcb *np, int cam_status, int target, int lun, int task) target 3209 drivers/scsi/sym53c8xx_2/sym_hipd.c cp->target != target || target 3275 drivers/scsi/sym53c8xx_2/sym_hipd.c int target=-1, lun=-1, task; target 3289 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[i]; target 3292 drivers/scsi/sym53c8xx_2/sym_hipd.c target = i; target 3299 drivers/scsi/sym53c8xx_2/sym_hipd.c target = i; target 3303 drivers/scsi/sym53c8xx_2/sym_hipd.c if (target != -1) target 3311 drivers/scsi/sym53c8xx_2/sym_hipd.c if (target == -1) { target 3317 drivers/scsi/sym53c8xx_2/sym_hipd.c target = cp->target; target 3327 drivers/scsi/sym53c8xx_2/sym_hipd.c if (target != -1) { target 3328 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[target]; target 3329 drivers/scsi/sym53c8xx_2/sym_hipd.c np->abrt_sel.sel_id = target; target 3381 drivers/scsi/sym53c8xx_2/sym_hipd.c i = sym_dequeue_from_squeue(np, i, cp->target, cp->lun, -1); target 3410 drivers/scsi/sym53c8xx_2/sym_hipd.c target = INB(np, nc_sdid) & 0xf; target 3411 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[target]; target 3464 drivers/scsi/sym53c8xx_2/sym_hipd.c if (cp->target != target) target 3521 drivers/scsi/sym53c8xx_2/sym_hipd.c target = INB(np, nc_sdid) & 0xf; target 3522 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[target]; target 3572 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_dequeue_from_squeue(np, i, target, lun, -1); target 3573 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_clear_tasks(np, DID_ABORT, target, lun, task); target 3947 drivers/scsi/sym53c8xx_2/sym_hipd.c int target = cp->target; target 3951 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, target, "sync msgin", np->msgin); target 3997 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setsync (np, target, ofs, per, div, fak); target 4011 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, target, "sync msgout", np->msgout); target 4019 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setsync (np, target, 0, 0, 0, 0); target 4060 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_ppr_nego_check(struct sym_hcb *np, int req, int target) target 4062 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[target]; target 4072 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, target, "ppr msgin", np->msgin); target 4123 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setpprot(np, target, opts, ofs, per, wide, div, fak); target 4137 
drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, target, "ppr msgout", np->msgout); target 4145 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setpprot (np, target, 0, 0, 0, 0, 0, 0); target 4178 drivers/scsi/sym53c8xx_2/sym_hipd.c result = sym_ppr_nego_check(np, req, cp->target); target 4199 drivers/scsi/sym53c8xx_2/sym_hipd.c int target = cp->target; target 4203 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, target, "wide msgin", np->msgin); target 4235 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setwide (np, target, wide); target 4251 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, target, "wide msgout", np->msgout); target 4295 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_print_nego_msg(np, cp->target, target 4329 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setpprot (np, cp->target, 0, 0, 0, 0, 0, 0); target 4340 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setsync (np, cp->target, 0, 0, 0, 0); target 4343 drivers/scsi/sym53c8xx_2/sym_hipd.c sym_setwide (np, cp->target, 0); target 4376 drivers/scsi/sym53c8xx_2/sym_hipd.c u_char target = INB(np, nc_sdid) & 0x0f; target 4377 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[target]; target 4649 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[tn]; target 4755 drivers/scsi/sym53c8xx_2/sym_hipd.c cp->target = tn; target 4774 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[cp->target]; target 4970 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[tn]; target 5051 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[tn]; target 5097 drivers/scsi/sym53c8xx_2/sym_hipd.c struct sym_tcb *tp = &np->target[tn]; target 5148 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[cp->target]; target 5234 drivers/scsi/sym53c8xx_2/sym_hipd.c cp->phys.select.sel_id = cp->target; target 5272 drivers/scsi/sym53c8xx_2/sym_hipd.c int sym_reset_scsi_target(struct sym_hcb *np, int target) target 5276 drivers/scsi/sym53c8xx_2/sym_hipd.c if (target == np->myaddr || (u_int)target >= SYM_CONF_MAX_TARGET) target 5279 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[target]; target 5376 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[cp->target]; target 5408 drivers/scsi/sym53c8xx_2/sym_hipd.c i = sym_dequeue_from_squeue(np, i, cp->target, sdev->lun, -1); target 5504 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[cp->target]; target 5766 drivers/scsi/sym53c8xx_2/sym_hipd.c np->targtbl[i] = cpu_to_scr(vtobus(&np->target[i])); target 5767 drivers/scsi/sym53c8xx_2/sym_hipd.c np->target[i].head.luntbl_sa = target 5769 drivers/scsi/sym53c8xx_2/sym_hipd.c np->target[i].head.lun0_sa = target 5798 drivers/scsi/sym53c8xx_2/sym_hipd.c int target; target 5822 drivers/scsi/sym53c8xx_2/sym_hipd.c for (target = 0; target < SYM_CONF_MAX_TARGET ; target++) { target 5823 drivers/scsi/sym53c8xx_2/sym_hipd.c tp = &np->target[target]; target 772 drivers/scsi/sym53c8xx_2/sym_hipd.h u_char target; target 855 drivers/scsi/sym53c8xx_2/sym_hipd.h struct sym_tcb target[SYM_CONF_MAX_TARGET]; target 1052 drivers/scsi/sym53c8xx_2/sym_hipd.h int sym_clear_tasks(struct sym_hcb *np, int cam_status, int target, int lun, int task); target 1059 drivers/scsi/sym53c8xx_2/sym_hipd.h int sym_reset_scsi_target(struct sym_hcb *np, int target); target 82 drivers/scsi/sym53c8xx_2/sym_nvram.c sym_Symbios_setup_target(struct sym_tcb *tp, int target, Symbios_nvram *nvram) target 84 drivers/scsi/sym53c8xx_2/sym_nvram.c Symbios_target *tn = &nvram->target[target]; target 106 
drivers/scsi/sym53c8xx_2/sym_nvram.c sym_Tekram_setup_target(struct sym_tcb *tp, int target, Tekram_nvram *nvram) target 108 drivers/scsi/sym53c8xx_2/sym_nvram.c struct Tekram_target *tn = &nvram->target[target]; target 125 drivers/scsi/sym53c8xx_2/sym_nvram.c void sym_nvram_setup_target(struct sym_tcb *tp, int target, struct sym_nvram *nvp) target 129 drivers/scsi/sym53c8xx_2/sym_nvram.c sym_Symbios_setup_target(tp, target, &nvp->data.Symbios); target 132 drivers/scsi/sym53c8xx_2/sym_nvram.c sym_Tekram_setup_target(tp, target, &nvp->data.Tekram); target 159 drivers/scsi/sym53c8xx_2/sym_nvram.c struct Symbios_target *tn = &nvram->target[i]; target 207 drivers/scsi/sym53c8xx_2/sym_nvram.c struct Tekram_target *tn = &nvram->target[i]; target 97 drivers/scsi/sym53c8xx_2/sym_nvram.h } target[16]; target 141 drivers/scsi/sym53c8xx_2/sym_nvram.h } target[16]; target 184 drivers/scsi/sym53c8xx_2/sym_nvram.h void sym_nvram_setup_target (struct sym_tcb *tp, int target, struct sym_nvram *nvp); target 1314 drivers/scsi/ufs/ufshcd.c .target = ufshcd_devfreq_target, target 282 drivers/scsi/virtio_scsi.c unsigned int target = event->lun[1]; target 287 drivers/scsi/virtio_scsi.c scsi_add_device(shost, 0, target, lun); target 290 drivers/scsi/virtio_scsi.c sdev = scsi_device_lookup(shost, 0, target, lun); target 296 drivers/scsi/virtio_scsi.c shost->host_no, target, lun); target 309 drivers/scsi/virtio_scsi.c unsigned int target = event->lun[1]; target 314 drivers/scsi/virtio_scsi.c sdev = scsi_device_lookup(shost, 0, target, lun); target 317 drivers/scsi/virtio_scsi.c shost->host_no, target, lun); target 265 drivers/scsi/vmw_pvscsi.c cmd.target = ctx->cmd->device->id; target 317 drivers/scsi/vmw_pvscsi.c static void ll_device_reset(const struct pvscsi_adapter *adapter, u32 target) target 321 drivers/scsi/vmw_pvscsi.c dev_dbg(pvscsi_dev(adapter), "Resetting device: target=%u\n", target); target 323 drivers/scsi/vmw_pvscsi.c cmd.target = target; target 706 drivers/scsi/vmw_pvscsi.c e->target = sdev->id; target 1037 drivers/scsi/vmw_pvscsi.c desc->bus, desc->target, desc->lun[1]); target 1042 drivers/scsi/vmw_pvscsi.c sdev = scsi_device_lookup(host, desc->bus, desc->target, target 1049 drivers/scsi/vmw_pvscsi.c desc->target, desc->lun[1]); target 1058 drivers/scsi/vmw_pvscsi.c desc->bus, desc->target, desc->lun[1]); target 1063 drivers/scsi/vmw_pvscsi.c sdev = scsi_device_lookup(host, desc->bus, desc->target, target 1071 drivers/scsi/vmw_pvscsi.c desc->bus, desc->target, desc->lun[1]); target 129 drivers/scsi/vmw_pvscsi.h u32 target; target 173 drivers/scsi/vmw_pvscsi.h u32 target; target 246 drivers/scsi/vmw_pvscsi.h u32 target; target 335 drivers/scsi/vmw_pvscsi.h u8 target; target 733 drivers/staging/exfat/exfat.h char *target; target 2457 drivers/staging/exfat/exfat_super.c if (S_ISLNK(i_mode) && !EXFAT_I(inode)->target) { target 2458 drivers/staging/exfat/exfat_super.c EXFAT_I(inode)->target = kmalloc(i_size_read(inode) + 1, target 2460 drivers/staging/exfat/exfat_super.c if (!EXFAT_I(inode)->target) { target 2464 drivers/staging/exfat/exfat_super.c ffsReadFile(dir, &fid, EXFAT_I(inode)->target, target 2466 drivers/staging/exfat/exfat_super.c *(EXFAT_I(inode)->target + i_size_read(inode)) = '\0'; target 2560 drivers/staging/exfat/exfat_super.c const char *target) target 2567 drivers/staging/exfat/exfat_super.c u64 len = (u64)strlen(target); target 2587 drivers/staging/exfat/exfat_super.c err = ffsWriteFile(dir, &fid, (char *)target, len, &ret); target 2617 drivers/staging/exfat/exfat_super.c 
EXFAT_I(inode)->target = kmemdup(target, len + 1, GFP_KERNEL); target 2618 drivers/staging/exfat/exfat_super.c if (!EXFAT_I(inode)->target) { target 3029 drivers/staging/exfat/exfat_super.c if (ei->target) { target 3030 drivers/staging/exfat/exfat_super.c char *cookie = ei->target; target 3033 drivers/staging/exfat/exfat_super.c return (char *)(ei->target); target 3313 drivers/staging/exfat/exfat_super.c EXFAT_I(inode)->target = NULL; target 3406 drivers/staging/exfat/exfat_super.c kfree(EXFAT_I(inode)->target); target 3407 drivers/staging/exfat/exfat_super.c EXFAT_I(inode)->target = NULL; target 3802 drivers/staging/exfat/exfat_super.c EXFAT_I(inode)->target = NULL; target 336 drivers/staging/media/imx/imx-media-capture.c switch (s->target) { target 299 drivers/staging/media/imx/imx-media-csc-scaler.c switch (s->target) { target 318 drivers/staging/media/imx/imx-media-csc-scaler.c if (s->target == V4L2_SEL_TGT_CROP || target 319 drivers/staging/media/imx/imx-media-csc-scaler.c s->target == V4L2_SEL_TGT_COMPOSE) { target 337 drivers/staging/media/imx/imx-media-csc-scaler.c switch (s->target) { target 1585 drivers/staging/media/imx/imx-media-csi.c switch (sel->target) { target 1661 drivers/staging/media/imx/imx-media-csi.c switch (sel->target) { target 204 drivers/staging/media/ipu3/ipu3-v4l2.c switch (sel->target) { target 237 drivers/staging/media/ipu3/ipu3-v4l2.c imgu_sd->pipe, sel->which, sel->target, target 243 drivers/staging/media/ipu3/ipu3-v4l2.c switch (sel->target) { target 652 drivers/staging/media/omap4iss/iss_video.c .target = sel->target, target 657 drivers/staging/media/omap4iss/iss_video.c switch (sel->target) { target 709 drivers/staging/media/omap4iss/iss_video.c .target = sel->target, target 716 drivers/staging/media/omap4iss/iss_video.c switch (sel->target) { target 220 drivers/staging/media/soc_camera/imx074.c switch (sel->target) { target 179 drivers/staging/media/soc_camera/mt9t031.c static u16 mt9t031_skip(s32 *source, s32 target, s32 max) target 183 drivers/staging/media/soc_camera/mt9t031.c if (*source < target + target / 2) { target 184 drivers/staging/media/soc_camera/mt9t031.c *source = target; target 188 drivers/staging/media/soc_camera/mt9t031.c skip = min(max, *source + target / 2) / target; target 191 drivers/staging/media/soc_camera/mt9t031.c *source = target * skip; target 302 drivers/staging/media/soc_camera/mt9t031.c sel->target != V4L2_SEL_TGT_CROP) target 327 drivers/staging/media/soc_camera/mt9t031.c switch (sel->target) { target 982 drivers/staging/media/soc_camera/soc_camera.c (s->target != V4L2_SEL_TGT_COMPOSE && target 983 drivers/staging/media/soc_camera/soc_camera.c s->target != V4L2_SEL_TGT_CROP)) target 986 drivers/staging/media/soc_camera/soc_camera.c if (s->target == V4L2_SEL_TGT_COMPOSE) { target 1001 drivers/staging/media/soc_camera/soc_camera.c if (s->target == V4L2_SEL_TGT_CROP && target 1008 drivers/staging/media/soc_camera/soc_camera.c s->target == V4L2_SEL_TGT_COMPOSE) { target 1770 drivers/staging/media/soc_camera/soc_camera.c .target = sel->target, target 1787 drivers/staging/media/soc_camera/soc_camera.c .target = sel->target, target 286 drivers/staging/media/soc_camera/soc_mt9v022.c sel->target != V4L2_SEL_TGT_CROP) target 365 drivers/staging/media/soc_camera/soc_mt9v022.c switch (sel->target) { target 408 drivers/staging/media/soc_camera/soc_mt9v022.c .target = V4L2_SEL_TGT_CROP, target 861 drivers/staging/media/soc_camera/soc_ov5642.c sel->target != V4L2_SEL_TGT_CROP) target 895 drivers/staging/media/soc_camera/soc_ov5642.c 
switch (sel->target) { target 727 drivers/staging/media/soc_camera/soc_ov9740.c switch (sel->target) { target 392 drivers/staging/rtl8188eu/core/rtw_mlme.c void rtw_update_scanned_network(struct adapter *adapter, struct wlan_bssid_ex *target) target 408 drivers/staging/rtl8188eu/core/rtw_mlme.c if (is_same_network(&pnetwork->network, target)) target 424 drivers/staging/rtl8188eu/core/rtw_mlme.c &target->PhyInfo.Optimum_antenna); target 425 drivers/staging/rtl8188eu/core/rtw_mlme.c memcpy(&pnetwork->network, target, target 426 drivers/staging/rtl8188eu/core/rtw_mlme.c get_wlan_bssid_ex_sz(target)); target 449 drivers/staging/rtl8188eu/core/rtw_mlme.c bssid_ex_sz = get_wlan_bssid_ex_sz(target); target 450 drivers/staging/rtl8188eu/core/rtw_mlme.c target->Length = bssid_ex_sz; target 452 drivers/staging/rtl8188eu/core/rtw_mlme.c &target->PhyInfo.Optimum_antenna); target 453 drivers/staging/rtl8188eu/core/rtw_mlme.c memcpy(&pnetwork->network, target, bssid_ex_sz); target 472 drivers/staging/rtl8188eu/core/rtw_mlme.c if ((pnetwork->network.ie_length > target->ie_length) && (target->Reserved[0] == 1)) target 475 drivers/staging/rtl8188eu/core/rtw_mlme.c update_network(&pnetwork->network, target, adapter, update_ie); target 302 drivers/staging/rtl8188eu/include/rtw_mlme.h struct wlan_bssid_ex *target); target 2566 drivers/staging/rtl8192e/rtllib_rx.c struct rtllib_network *target; target 2649 drivers/staging/rtl8192e/rtllib_rx.c list_for_each_entry(target, &ieee->network_list, list) { target 2650 drivers/staging/rtl8192e/rtllib_rx.c if (is_same_network(target, network, target 2651 drivers/staging/rtl8192e/rtllib_rx.c (target->ssid_len ? 1 : 0))) target 2654 drivers/staging/rtl8192e/rtllib_rx.c (target->last_scanned < oldest->last_scanned)) target 2655 drivers/staging/rtl8192e/rtllib_rx.c oldest = target; target 2661 drivers/staging/rtl8192e/rtllib_rx.c if (&target->list == &ieee->network_list) { target 2665 drivers/staging/rtl8192e/rtllib_rx.c target = oldest; target 2668 drivers/staging/rtl8192e/rtllib_rx.c escape_essid(target->ssid, target->ssid_len), target 2669 drivers/staging/rtl8192e/rtllib_rx.c target->bssid); target 2672 drivers/staging/rtl8192e/rtllib_rx.c target = list_entry(ieee->network_free_list.next, target 2682 drivers/staging/rtl8192e/rtllib_rx.c memcpy(target, network, sizeof(*target)); target 2683 drivers/staging/rtl8192e/rtllib_rx.c list_add_tail(&target->list, &ieee->network_list); target 2688 drivers/staging/rtl8192e/rtllib_rx.c escape_essid(target->ssid, target->ssid_len), target 2689 drivers/staging/rtl8192e/rtllib_rx.c target->bssid, target 2696 drivers/staging/rtl8192e/rtllib_rx.c renew = !time_after(target->last_scanned + ieee->scan_age, target 2698 drivers/staging/rtl8192e/rtllib_rx.c if ((!target->ssid_len) && target 2699 drivers/staging/rtl8192e/rtllib_rx.c (((network->ssid_len > 0) && (target->hidden_ssid_len == 0)) target 2705 drivers/staging/rtl8192e/rtllib_rx.c update_network(ieee, target, network); target 1749 drivers/staging/rtl8192e/rtllib_softmac.c struct rtllib_network *target; target 1753 drivers/staging/rtl8192e/rtllib_softmac.c list_for_each_entry(target, &ieee->network_list, list) { target 1762 drivers/staging/rtl8192e/rtllib_softmac.c if (ieee->scan_age == 0 || time_after(target->last_scanned + target 1764 drivers/staging/rtl8192e/rtllib_softmac.c rtllib_softmac_new_net(ieee, target); target 2299 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c struct ieee80211_network *target; target 2417 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c 
list_for_each_entry(target, &ieee->network_list, list) { target 2418 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (is_same_network(target, network, ieee)) target 2421 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c (target->last_scanned < oldest->last_scanned)) target 2422 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c oldest = target; target 2427 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c if (&target->list == &ieee->network_list) { target 2431 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c target = oldest; target 2434 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c escape_essid(target->ssid, target 2435 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c target->ssid_len), target 2436 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c target->bssid); target 2439 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c target = list_entry(ieee->network_free_list.next, target 2453 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c memcpy(target, network, sizeof(*target)); target 2454 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c list_add_tail(&target->list, &ieee->network_list); target 2459 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c escape_essid(target->ssid, target 2460 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c target->ssid_len), target 2461 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c target->bssid, target 2469 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c renew = !time_after(target->last_scanned + ieee->scan_age, jiffies); target 2472 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c network->flags = (~NETWORK_EMPTY_ESSID & network->flags) | (NETWORK_EMPTY_ESSID & target->flags); target 2476 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c && (((network->ssid_len > 0) && (strncmp(target->ssid, network->ssid, network->ssid_len)))\ target 2481 drivers/staging/rtl8192u/ieee80211/ieee80211_rx.c update_network(target, network); target 1441 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c struct ieee80211_network *target; target 1445 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c list_for_each_entry(target, &ieee->network_list, list) { target 1453 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c if (ieee->scan_age == 0 || time_after(target->last_scanned + ieee->scan_age, jiffies)) target 1454 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c ieee80211_softmac_new_net(ieee, target); target 483 drivers/staging/rtl8192u/r8192U_core.c struct ieee80211_network *target; target 485 drivers/staging/rtl8192u/r8192U_core.c list_for_each_entry(target, &ieee->network_list, list) { target 488 drivers/staging/rtl8192u/r8192U_core.c if (target->wpa_ie_len > 0 || target->rsn_ie_len > 0) target 491 drivers/staging/rtl8192u/r8192U_core.c seq_printf(m, "%s %s\n", target->ssid, wpa); target 333 drivers/staging/rtl8712/rtl871x_mlme.c struct wlan_bssid_ex *target) target 351 drivers/staging/rtl8712/rtl871x_mlme.c if (is_same_network(&pnetwork->network, target)) target 369 drivers/staging/rtl8712/rtl871x_mlme.c target->Rssi = (pnetwork->network.Rssi + target 370 drivers/staging/rtl8712/rtl871x_mlme.c target->Rssi) / 2; target 371 drivers/staging/rtl8712/rtl871x_mlme.c memcpy(&pnetwork->network, target, target 372 drivers/staging/rtl8712/rtl871x_mlme.c r8712_get_wlan_bssid_ex_sz(target)); target 380 drivers/staging/rtl8712/rtl871x_mlme.c bssid_ex_sz = r8712_get_wlan_bssid_ex_sz(target); target 381 drivers/staging/rtl8712/rtl871x_mlme.c target->Length = bssid_ex_sz; target 382 drivers/staging/rtl8712/rtl871x_mlme.c memcpy(&pnetwork->network, target, 
bssid_ex_sz); target 391 drivers/staging/rtl8712/rtl871x_mlme.c update_network(&pnetwork->network, target, adapter); target 589 drivers/staging/rtl8723bs/core/rtw_mlme.c void rtw_update_scanned_network(struct adapter *adapter, struct wlan_bssid_ex *target) target 612 drivers/staging/rtl8723bs/core/rtw_mlme.c if (is_same_network(&(pnetwork->network), target, feature)) { target 641 drivers/staging/rtl8723bs/core/rtw_mlme.c memcpy(&(pnetwork->network), target, get_wlan_bssid_ex_sz(target)); target 663 drivers/staging/rtl8723bs/core/rtw_mlme.c bssid_ex_sz = get_wlan_bssid_ex_sz(target); target 664 drivers/staging/rtl8723bs/core/rtw_mlme.c target->Length = bssid_ex_sz; target 665 drivers/staging/rtl8723bs/core/rtw_mlme.c memcpy(&(pnetwork->network), target, bssid_ex_sz); target 686 drivers/staging/rtl8723bs/core/rtw_mlme.c if ((pnetwork->network.IELength > target->IELength) && (target->Reserved[0] == 1)) target 690 drivers/staging/rtl8723bs/core/rtw_mlme.c if ((target->Reserved[0] != 2) && target 691 drivers/staging/rtl8723bs/core/rtw_mlme.c (target->Reserved[0] >= pnetwork->network.Reserved[0]) target 698 drivers/staging/rtl8723bs/core/rtw_mlme.c update_network(&(pnetwork->network), target, adapter, update_ie); target 1704 drivers/staging/rtl8723bs/hal/hal_com.c u32 v1 = 0, v2 = 0, target = 0; target 1720 drivers/staging/rtl8723bs/hal/hal_com.c target = v2; target 1724 drivers/staging/rtl8723bs/hal/hal_com.c DBG_871X("padapter->eeprompriv.EEPROMRFGainVal = 0x%x , Gain offset Target Value = 0x%x\n", padapter->eeprompriv.EEPROMRFGainVal, target); target 1725 drivers/staging/rtl8723bs/hal/hal_com.c PHY_SetRFReg(padapter, RF_PATH_A, REG_RF_BB_GAIN_OFFSET, BIT18|BIT17|BIT16|BIT15, target); target 563 drivers/staging/rtl8723bs/include/rtw_mlme.h extern void rtw_update_scanned_network(struct adapter *adapter, struct wlan_bssid_ex *target); target 208 drivers/staging/uwb/drp-ie.c switch (rsv->target.type) { target 210 drivers/staging/uwb/drp-ie.c drp_ie->dev_addr = rsv->target.dev->dev_addr; target 213 drivers/staging/uwb/drp-ie.c drp_ie->dev_addr = rsv->target.devaddr; target 31 drivers/staging/uwb/include/debug-cmd.h __u8 target[6]; target 99 drivers/staging/uwb/rsv.c char owner[UWB_ADDR_STRSIZE], target[UWB_ADDR_STRSIZE]; target 102 drivers/staging/uwb/rsv.c if (rsv->target.type == UWB_RSV_TARGET_DEV) target 103 drivers/staging/uwb/rsv.c devaddr = rsv->target.dev->dev_addr; target 105 drivers/staging/uwb/rsv.c devaddr = rsv->target.devaddr; target 106 drivers/staging/uwb/rsv.c uwb_dev_addr_print(target, sizeof(target), &devaddr); target 109 drivers/staging/uwb/rsv.c text, owner, target, uwb_rsv_state_str(rsv->state)); target 143 drivers/staging/uwb/rsv.c switch (rsv->target.type) { target 145 drivers/staging/uwb/rsv.c streams_bm = rsv->target.dev->streams; target 174 drivers/staging/uwb/rsv.c switch (rsv->target.type) { target 176 drivers/staging/uwb/rsv.c streams_bm = rsv->target.dev->streams; target 513 drivers/staging/uwb/rsv.c if (rsv->target.type == UWB_RSV_TARGET_DEV) target 514 drivers/staging/uwb/rsv.c uwb_dev_put(rsv->target.dev); target 719 drivers/staging/uwb/rsv.c switch (rsv->target.type) { target 726 drivers/staging/uwb/rsv.c rsv_src = &rsv->target.dev->dev_addr; target 747 drivers/staging/uwb/rsv.c rsv->target.type = UWB_RSV_TARGET_DEV; target 748 drivers/staging/uwb/rsv.c rsv->target.dev = &rc->uwb_dev; target 75 drivers/staging/uwb/uwb-debug.c struct uwb_dev *target; target 78 drivers/staging/uwb/uwb-debug.c memcpy(&macaddr, cmd->target, sizeof(macaddr)); target 79 
drivers/staging/uwb/uwb-debug.c target = uwb_dev_get_by_macaddr(rc, &macaddr); target 80 drivers/staging/uwb/uwb-debug.c if (target == NULL) target 85 drivers/staging/uwb/uwb-debug.c uwb_dev_put(target); target 89 drivers/staging/uwb/uwb-debug.c rsv->target.type = UWB_RSV_TARGET_DEV; target 90 drivers/staging/uwb/uwb-debug.c rsv->target.dev = target; target 203 drivers/staging/uwb/uwb-debug.c char owner[UWB_ADDR_STRSIZE], target[UWB_ADDR_STRSIZE]; target 207 drivers/staging/uwb/uwb-debug.c if (rsv->target.type == UWB_RSV_TARGET_DEV) { target 208 drivers/staging/uwb/uwb-debug.c devaddr = rsv->target.dev->dev_addr; target 211 drivers/staging/uwb/uwb-debug.c devaddr = rsv->target.devaddr; target 214 drivers/staging/uwb/uwb-debug.c uwb_dev_addr_print(target, sizeof(target), &devaddr); target 218 drivers/staging/uwb/uwb-debug.c owner, target, uwb_rsv_state_str(rsv->state)); target 232 drivers/staging/uwb/uwb.h struct uwb_rsv_target target; target 255 drivers/staging/wusbcore/host/whci/whci-hc.h static inline void whc_qset_set_link_ptr(u64 *ptr, u64 target) target 257 drivers/staging/wusbcore/host/whci/whci-hc.h if (target) target 258 drivers/staging/wusbcore/host/whci/whci-hc.h *ptr = (*ptr & ~(QH_LINK_PTR_MASK | QH_LINK_T)) | QH_LINK_PTR(target); target 82 drivers/staging/wusbcore/reservation.c rsv->target.type = UWB_RSV_TARGET_DEVADDR; target 83 drivers/staging/wusbcore/reservation.c rsv->target.devaddr = bcid; target 105 drivers/thermal/fair_share.c instance->target = get_target_state(tz, cdev, percentage, target 41 drivers/thermal/gov_bang_bang.c if (instance->target == THERMAL_NO_TARGET) target 42 drivers/thermal/gov_bang_bang.c instance->target = 0; target 45 drivers/thermal/gov_bang_bang.c if (instance->target != 0 && instance->target != 1) { target 47 drivers/thermal/gov_bang_bang.c instance->name, instance->target); target 48 drivers/thermal/gov_bang_bang.c instance->target = 1; target 55 drivers/thermal/gov_bang_bang.c if (instance->target == 0 && tz->temperature >= trip_temp) target 56 drivers/thermal/gov_bang_bang.c instance->target = 1; target 57 drivers/thermal/gov_bang_bang.c else if (instance->target == 1 && target 59 drivers/thermal/gov_bang_bang.c instance->target = 0; target 62 drivers/thermal/gov_bang_bang.c (int)instance->target); target 118 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.c result = acpi_bus_get_device(trt->target, &adev); target 198 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.c if (art->target) { target 199 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.c result = acpi_bus_get_device(art->target, &adev); target 250 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.c get_single_name(arts[i].target, art_user[i].target_device); target 286 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.c get_single_name(trts[i].target, trt_user[i].target_device); target 19 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.h acpi_handle target; target 35 drivers/thermal/intel/int340x_thermal/acpi_thermal_rel.h acpi_handle target; target 381 drivers/thermal/intel/x86_pkg_temp_thermal.c int target; target 386 drivers/thermal/intel/x86_pkg_temp_thermal.c target = cpumask_any_but(&zonedev->cpumask, cpu); target 388 drivers/thermal/intel/x86_pkg_temp_thermal.c lastcpu = target >= nr_cpu_ids; target 418 drivers/thermal/intel/x86_pkg_temp_thermal.c zonedev->cpu = target; target 452 drivers/thermal/intel/x86_pkg_temp_thermal.c pkg_thermal_schedule_work(target, &zonedev->work); target 532 drivers/thermal/power_allocator.c instance->target = 0; 
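The drivers/thermal entries in this span all revolve around one pattern: each governor instance records a requested cooling state in instance->target (or leaves it at THERMAL_NO_TARGET), and the update path applies the maximum request across instances to the cooling device. The following is a minimal standalone sketch of that aggregation only, not kernel code; every name in it (fake_cdev, fake_instance, FAKE_NO_TARGET, fake_cdev_update) is made up for illustration and stands in for what the listed hits in thermal_core.c, thermal_helpers.c, gov_bang_bang.c and step_wise.c suggest.

/*
 * Standalone sketch (not kernel code) of the "max request wins"
 * aggregation pattern visible in the drivers/thermal hits nearby.
 * All identifiers below are hypothetical.
 */
#include <stdio.h>

#define FAKE_NO_TARGET (~0UL)	/* stands in for THERMAL_NO_TARGET */

struct fake_cdev {
	unsigned long cur_state;	/* state last applied to the device */
};

struct fake_instance {
	unsigned long target;		/* requested cooling state, like thermal_instance.target */
	struct fake_cdev *cdev;
};

/* Mirrors the loop pattern seen in thermal_helpers.c: skip NO_TARGET, keep the max. */
static void fake_cdev_update(struct fake_cdev *cdev,
			     const struct fake_instance *inst, int n)
{
	unsigned long target = 0;
	int i;

	for (i = 0; i < n; i++) {
		if (inst[i].cdev != cdev || inst[i].target == FAKE_NO_TARGET)
			continue;
		if (inst[i].target > target)
			target = inst[i].target;
	}
	cdev->cur_state = target;	/* set_cur_state() equivalent */
	printf("cdev set to state %lu\n", target);
}

int main(void)
{
	struct fake_cdev cdev = { 0 };
	const struct fake_instance inst[] = {
		{ .target = 1, .cdev = &cdev },			/* bang-bang style on/off request */
		{ .target = FAKE_NO_TARGET, .cdev = &cdev },	/* trip not crossed, ignored */
		{ .target = 3, .cdev = &cdev },			/* highest request wins */
	};

	fake_cdev_update(&cdev, inst, 3);
	return 0;
}

Because the highest non-NO_TARGET request wins, a single instance holding a stale high target keeps the device throttled, which is consistent with the listed governors explicitly resetting or re-deciding instance->target on each pass.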
target 50 drivers/thermal/step_wise.c next_target = instance->target; target 149 drivers/thermal/step_wise.c old_target = instance->target; target 150 drivers/thermal/step_wise.c instance->target = get_target_state(instance, trend, throttle); target 152 drivers/thermal/step_wise.c old_target, (int)instance->target); target 154 drivers/thermal/step_wise.c if (instance->initialized && old_target == instance->target) target 159 drivers/thermal/step_wise.c instance->target != THERMAL_NO_TARGET) target 163 drivers/thermal/step_wise.c instance->target == THERMAL_NO_TARGET) target 603 drivers/thermal/thermal_core.c instance->target = state; target 728 drivers/thermal/thermal_core.c dev->target = THERMAL_NO_TARGET; target 47 drivers/thermal/thermal_core.h unsigned long target; /* expected cooling state */ target 169 drivers/thermal/thermal_helpers.c unsigned long target = 0; target 181 drivers/thermal/thermal_helpers.c instance->tz->id, instance->target); target 182 drivers/thermal/thermal_helpers.c if (instance->target == THERMAL_NO_TARGET) target 184 drivers/thermal/thermal_helpers.c if (instance->target > target) target 185 drivers/thermal/thermal_helpers.c target = instance->target; target 188 drivers/thermal/thermal_helpers.c if (!cdev->ops->set_cur_state(cdev, target)) target 189 drivers/thermal/thermal_helpers.c thermal_cooling_device_stats_update(cdev, target); target 193 drivers/thermal/thermal_helpers.c trace_cdev_update(cdev, target); target 194 drivers/thermal/thermal_helpers.c dev_dbg(&cdev->device, "set to state %lu\n", target); target 1037 drivers/tty/serial/msm_serial.c unsigned long target, old, best_rate = 0, diff, best_diff = ULONG_MAX; target 1059 drivers/tty/serial/msm_serial.c target = clk_round_rate(msm_port->clk, 16 * baud); target 1060 drivers/tty/serial/msm_serial.c divisor = DIV_ROUND_CLOSEST(target, 16 * baud); target 1066 drivers/tty/serial/msm_serial.c result = target / entry->divisor / 16; target 1073 drivers/tty/serial/msm_serial.c best_rate = target; target 1079 drivers/tty/serial/msm_serial.c old = target; target 1080 drivers/tty/serial/msm_serial.c target = clk_round_rate(msm_port->clk, old + 1); target 1085 drivers/tty/serial/msm_serial.c if (target == old) target 1090 drivers/tty/serial/msm_serial.c divisor = DIV_ROUND_CLOSEST(target, 16 * baud); target 456 drivers/usb/atm/usbatm.c u8 *target, unsigned int avail_space) target 464 drivers/usb/atm/usbatm.c bytes_written += stride, target += stride) { target 467 drivers/usb/atm/usbatm.c u8 *ptr = target; target 486 drivers/usb/atm/usbatm.c u8 *trailer = target + ATM_CELL_SIZE - ATM_AAL5_TRAILER; target 499 drivers/usb/atm/usbatm.c target[3] |= 0x2; /* adjust PTI */ target 980 drivers/usb/core/message.c int usb_get_status(struct usb_device *dev, int recip, int type, int target, target 1007 drivers/usb/core/message.c target, status, length, USB_CTRL_GET_TIMEOUT); target 412 drivers/usb/early/xhci-dbc.c static void xdbc_ring_doorbell(int target) target 414 drivers/usb/early/xhci-dbc.c writel(DOOR_BELL_TARGET(target), &xdbc.xdbc_reg->doorbell); target 590 drivers/usb/gadget/function/uvc_configfs.c struct config_item *target) target 603 drivers/usb/gadget/function/uvc_configfs.c if (!header || target->ci_parent != header) target 618 drivers/usb/gadget/function/uvc_configfs.c target_hdr = to_uvcg_control_header(target); target 632 drivers/usb/gadget/function/uvc_configfs.c struct config_item *target) target 644 drivers/usb/gadget/function/uvc_configfs.c if (!header || target->ci_parent != header) target 655 
drivers/usb/gadget/function/uvc_configfs.c target_hdr = to_uvcg_control_header(target); target 874 drivers/usb/gadget/function/uvc_configfs.c struct config_item *target) target 904 drivers/usb/gadget/function/uvc_configfs.c if (src->ci_parent->ci_parent != target->ci_parent->ci_parent) target 908 drivers/usb/gadget/function/uvc_configfs.c if (!strcmp(target->ci_parent->ci_name, uvcg_format_names[i])) target 915 drivers/usb/gadget/function/uvc_configfs.c target_fmt = container_of(to_config_group(target), struct uvcg_format, target 920 drivers/usb/gadget/function/uvc_configfs.c uvcg_format_set_indices(to_config_group(target)); target 940 drivers/usb/gadget/function/uvc_configfs.c struct config_item *target) target 956 drivers/usb/gadget/function/uvc_configfs.c target_fmt = container_of(to_config_group(target), struct uvcg_format, target 2156 drivers/usb/gadget/function/uvc_configfs.c struct config_item *target) target 2171 drivers/usb/gadget/function/uvc_configfs.c if (!header || target->ci_parent != header) target 2184 drivers/usb/gadget/function/uvc_configfs.c target_hdr = to_uvcg_streaming_header(target); target 2230 drivers/usb/gadget/function/uvc_configfs.c struct config_item *target) target 2242 drivers/usb/gadget/function/uvc_configfs.c if (!header || target->ci_parent != header) target 2256 drivers/usb/gadget/function/uvc_configfs.c target_hdr = to_uvcg_streaming_header(target); target 1318 drivers/usb/gadget/udc/atmel_usba_udc.c struct usba_ep *target; target 1320 drivers/usb/gadget/udc/atmel_usba_udc.c target = get_ep_by_addr(udc, le16_to_cpu(crq->wIndex)); target 1321 drivers/usb/gadget/udc/atmel_usba_udc.c if (!target) target 1325 drivers/usb/gadget/udc/atmel_usba_udc.c if (is_stalled(udc, target)) target 1348 drivers/usb/gadget/udc/atmel_usba_udc.c struct usba_ep *target; target 1353 drivers/usb/gadget/udc/atmel_usba_udc.c target = get_ep_by_addr(udc, le16_to_cpu(crq->wIndex)); target 1354 drivers/usb/gadget/udc/atmel_usba_udc.c if (!target) target 1357 drivers/usb/gadget/udc/atmel_usba_udc.c usba_ep_writel(target, CLR_STA, USBA_FORCE_STALL); target 1358 drivers/usb/gadget/udc/atmel_usba_udc.c if (target->index != 0) target 1359 drivers/usb/gadget/udc/atmel_usba_udc.c usba_ep_writel(target, CLR_STA, target 1382 drivers/usb/gadget/udc/atmel_usba_udc.c struct usba_ep *target; target 1388 drivers/usb/gadget/udc/atmel_usba_udc.c target = get_ep_by_addr(udc, le16_to_cpu(crq->wIndex)); target 1389 drivers/usb/gadget/udc/atmel_usba_udc.c if (!target) target 1392 drivers/usb/gadget/udc/atmel_usba_udc.c usba_ep_writel(target, SET_STA, USBA_FORCE_STALL); target 153 drivers/usb/misc/ftdi-elan.c struct u132_target target[4]; target 292 drivers/usb/misc/ftdi-elan.c struct u132_target *target, u8 *buffer, int length); target 344 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed_number]; target 345 drivers/usb/misc/ftdi-elan.c if (target->active == 1) { target 346 drivers/usb/misc/ftdi-elan.c target->condition_code = TD_DEVNOTRESP; target 348 drivers/usb/misc/ftdi-elan.c ftdi_elan_do_callback(ftdi, target, NULL, 0); target 363 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed_number]; target 364 drivers/usb/misc/ftdi-elan.c target->abandoning = 1; target 365 drivers/usb/misc/ftdi-elan.c wait_1:if (target->active == 1) { target 387 drivers/usb/misc/ftdi-elan.c wait_2:if (target->active == 1) { target 421 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed_number]; target 422 drivers/usb/misc/ftdi-elan.c target->abandoning 
= 1; target 423 drivers/usb/misc/ftdi-elan.c wait:if (target->active == 1) { target 827 drivers/usb/misc/ftdi-elan.c struct u132_target *target, u8 *buffer, int length) target 829 drivers/usb/misc/ftdi-elan.c struct urb *urb = target->urb; target 830 drivers/usb/misc/ftdi-elan.c int halted = target->halted; target 831 drivers/usb/misc/ftdi-elan.c int skipped = target->skipped; target 832 drivers/usb/misc/ftdi-elan.c int actual = target->actual; target 833 drivers/usb/misc/ftdi-elan.c int non_null = target->non_null; target 834 drivers/usb/misc/ftdi-elan.c int toggle_bits = target->toggle_bits; target 835 drivers/usb/misc/ftdi-elan.c int error_count = target->error_count; target 836 drivers/usb/misc/ftdi-elan.c int condition_code = target->condition_code; target 837 drivers/usb/misc/ftdi-elan.c int repeat_number = target->repeat_number; target 839 drivers/usb/misc/ftdi-elan.c int, int, int, int) = target->callback; target 840 drivers/usb/misc/ftdi-elan.c target->active -= 1; target 841 drivers/usb/misc/ftdi-elan.c target->callback = NULL; target 842 drivers/usb/misc/ftdi-elan.c (*callback) (target->endp, urb, buffer, length, toggle_bits, target 848 drivers/usb/misc/ftdi-elan.c struct u132_target *target, u16 ed_length, int ed_number, int ed_type, target 853 drivers/usb/misc/ftdi-elan.c target->actual = 0; target 854 drivers/usb/misc/ftdi-elan.c target->non_null = (ed_length >> 15) & 0x0001; target 855 drivers/usb/misc/ftdi-elan.c target->repeat_number = (ed_length >> 11) & 0x000F; target 857 drivers/usb/misc/ftdi-elan.c if (payload == 0 || target->abandoning > 0) { target 858 drivers/usb/misc/ftdi-elan.c target->abandoning = 0; target 860 drivers/usb/misc/ftdi-elan.c ftdi_elan_do_callback(ftdi, target, 4 + ftdi->response, target 873 drivers/usb/misc/ftdi-elan.c target->abandoning = 0; target 875 drivers/usb/misc/ftdi-elan.c ftdi_elan_do_callback(ftdi, target, 4 + ftdi->response, target 885 drivers/usb/misc/ftdi-elan.c struct u132_target *target, u16 ed_length, int ed_number, int ed_type, target 889 drivers/usb/misc/ftdi-elan.c target->condition_code = TD_DEVNOTRESP; target 890 drivers/usb/misc/ftdi-elan.c target->actual = (ed_length >> 0) & 0x01FF; target 891 drivers/usb/misc/ftdi-elan.c target->non_null = (ed_length >> 15) & 0x0001; target 892 drivers/usb/misc/ftdi-elan.c target->repeat_number = (ed_length >> 11) & 0x000F; target 894 drivers/usb/misc/ftdi-elan.c if (target->active) target 895 drivers/usb/misc/ftdi-elan.c ftdi_elan_do_callback(ftdi, target, NULL, 0); target 896 drivers/usb/misc/ftdi-elan.c target->abandoning = 0; target 986 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed_number]; target 1000 drivers/usb/misc/ftdi-elan.c ftdi_elan_do_callback(ftdi, target, 4 + ftdi->response, target 1044 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ target 1046 drivers/usb/misc/ftdi-elan.c target->halted = (ftdi->response[0] >> 3) & target 1048 drivers/usb/misc/ftdi-elan.c target->skipped = (ftdi->response[0] >> 2) & target 1050 drivers/usb/misc/ftdi-elan.c target->toggle_bits = (ftdi->response[3] >> 6) target 1052 drivers/usb/misc/ftdi-elan.c target->error_count = (ftdi->response[3] >> 4) target 1054 drivers/usb/misc/ftdi-elan.c target->condition_code = (ftdi->response[ target 1057 drivers/usb/misc/ftdi-elan.c b = have_ed_set_response(ftdi, target, target 1062 drivers/usb/misc/ftdi-elan.c b = have_ed_get_response(ftdi, target, target 1442 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed]; target 1454 
drivers/usb/misc/ftdi-elan.c target->callback = callback; target 1455 drivers/usb/misc/ftdi-elan.c target->endp = endp; target 1456 drivers/usb/misc/ftdi-elan.c target->urb = urb; target 1457 drivers/usb/misc/ftdi-elan.c target->active = 1; target 1499 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed]; target 1519 drivers/usb/misc/ftdi-elan.c target->callback = callback; target 1520 drivers/usb/misc/ftdi-elan.c target->endp = endp; target 1521 drivers/usb/misc/ftdi-elan.c target->urb = urb; target 1522 drivers/usb/misc/ftdi-elan.c target->active = 1; target 1564 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed]; target 1576 drivers/usb/misc/ftdi-elan.c target->callback = callback; target 1577 drivers/usb/misc/ftdi-elan.c target->endp = endp; target 1578 drivers/usb/misc/ftdi-elan.c target->urb = urb; target 1579 drivers/usb/misc/ftdi-elan.c target->active = 1; target 1628 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed]; target 1655 drivers/usb/misc/ftdi-elan.c target->callback = callback; target 1656 drivers/usb/misc/ftdi-elan.c target->endp = endp; target 1657 drivers/usb/misc/ftdi-elan.c target->urb = urb; target 1658 drivers/usb/misc/ftdi-elan.c target->active = 1; target 1702 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed]; target 1720 drivers/usb/misc/ftdi-elan.c target->callback = callback; target 1721 drivers/usb/misc/ftdi-elan.c target->endp = endp; target 1722 drivers/usb/misc/ftdi-elan.c target->urb = urb; target 1723 drivers/usb/misc/ftdi-elan.c target->active = 1; target 1758 drivers/usb/misc/ftdi-elan.c struct u132_target *target = &ftdi->target[ed]; target 1760 drivers/usb/misc/ftdi-elan.c if (target->abandoning > 0) { target 1764 drivers/usb/misc/ftdi-elan.c target->abandoning = 1; target 1765 drivers/usb/misc/ftdi-elan.c wait_1:if (target->active == 1) { target 455 drivers/usb/typec/ucsi/ucsi_ccg.c unsigned long target = jiffies + msecs_to_jiffies(1000); target 470 drivers/usb/typec/ucsi/ucsi_ccg.c } while (time_is_after_jiffies(target)); target 472 drivers/usb/typec/ucsi/ucsi_ccg.c if (time_is_before_jiffies(target)) { target 467 drivers/vfio/vfio.c struct vfio_group *target = group; target 471 drivers/vfio/vfio.c if (group == target) { target 220 drivers/vhost/scsi.c u8 *target, *lunp; target 897 drivers/vhost/scsi.c tpg = READ_ONCE(vs_tpg[*vc->target]); target 899 drivers/vhost/scsi.c vq_err(vq, "Target 0x%x does not exist\n", *vc->target); target 954 drivers/vhost/scsi.c vc.target = &v_req_pi.lun[1]; target 959 drivers/vhost/scsi.c vc.target = &v_req.lun[1]; target 1224 drivers/vhost/scsi.c vc.target = &v_req.tmf.lun[1]; target 1232 drivers/vhost/scsi.c vc.target = NULL; target 1496 drivers/vhost/scsi.c u8 target; target 1514 drivers/vhost/scsi.c target = i; target 1515 drivers/vhost/scsi.c tpg = vs->vs_tpg[target]; target 1536 drivers/vhost/scsi.c vs->vs_tpg[target] = NULL; target 429 drivers/video/fbdev/tgafb.c int n, shift, base, min_diff, target; target 471 drivers/video/fbdev/tgafb.c target = (f << shift) / TGA_PLL_BASE_FREQ; target 474 drivers/video/fbdev/tgafb.c r = 7 / target; target 477 drivers/video/fbdev/tgafb.c base = target * r; target 479 drivers/video/fbdev/tgafb.c for (n = base < 7 ? 
7 : base; n < base + target && n < 449; n++) { target 490 drivers/video/fbdev/tgafb.c base += target; target 173 drivers/virt/fsl_hypervisor.c if ((param.source == -1) == (param.target == -1)) target 265 drivers/virt/fsl_hypervisor.c sg_list[0].target = param.remote_paddr; target 268 drivers/virt/fsl_hypervisor.c sg_list[0].target = page_to_phys(pages[0]) + lb_offset; target 279 drivers/virt/fsl_hypervisor.c sg_list[i].target = remote_paddr; target 283 drivers/virt/fsl_hypervisor.c sg_list[i].target = page_to_phys(pages[i]); target 291 drivers/virt/fsl_hypervisor.c param.ret = fh_partition_memcpy(param.source, param.target, target 358 drivers/virtio/virtio_balloon.c s64 target; target 368 drivers/virtio/virtio_balloon.c target = num_pages; target 369 drivers/virtio/virtio_balloon.c return target - vb->num_pages; target 558 drivers/xen/balloon.c void balloon_set_new_target(unsigned long target) target 561 drivers/xen/balloon.c balloon_stats.target_pages = target; target 209 drivers/xen/xen-balloon.c static DEVICE_ATTR(target, S_IRUGO | S_IWUSR, target 900 fs/9p/vfs_inode_dotl.c char *target; target 911 fs/9p/vfs_inode_dotl.c retval = p9_client_readlink(fid, &target); target 914 fs/9p/vfs_inode_dotl.c set_delayed_call(done, kfree_link, target); target 915 fs/9p/vfs_inode_dotl.c return target; target 27 fs/btrfs/block-group.c u64 target = 0; target 34 fs/btrfs/block-group.c target = BTRFS_BLOCK_GROUP_DATA | bctl->data.target; target 37 fs/btrfs/block-group.c target = BTRFS_BLOCK_GROUP_SYSTEM | bctl->sys.target; target 40 fs/btrfs/block-group.c target = BTRFS_BLOCK_GROUP_METADATA | bctl->meta.target; target 43 fs/btrfs/block-group.c return target; target 56 fs/btrfs/block-group.c u64 target; target 65 fs/btrfs/block-group.c target = get_restripe_target(fs_info, flags); target 66 fs/btrfs/block-group.c if (target) { target 68 fs/btrfs/block-group.c if ((flags & target) & BTRFS_EXTENDED_PROFILE_MASK) { target 70 fs/btrfs/block-group.c return extended_to_chunk(target); target 187 fs/btrfs/block-rsv.c struct btrfs_block_rsv *target = NULL; target 194 fs/btrfs/block-rsv.c target = global_rsv; target 196 fs/btrfs/block-rsv.c target = delayed_rsv; target 198 fs/btrfs/block-rsv.c if (target && block_rsv->space_info != target->space_info) target 199 fs/btrfs/block-rsv.c target = NULL; target 201 fs/btrfs/block-rsv.c return block_rsv_release_bytes(fs_info, block_rsv, target, num_bytes, target 2219 fs/btrfs/ctree.c u64 target; target 2242 fs/btrfs/ctree.c target = search; target 2263 fs/btrfs/ctree.c if ((search <= target && target - search <= 65536) || target 2264 fs/btrfs/ctree.c (search > target && search - target <= 65536)) { target 2085 fs/btrfs/ctree.h cpu->target = le64_to_cpu(disk->target); target 2105 fs/btrfs/ctree.h disk->target = cpu_to_le64(cpu->target); target 3589 fs/btrfs/volumes.c if (bargs->target == chunk_type) target 3913 fs/btrfs/volumes.c (!alloc_profile_is_valid(bctl_arg->target, 1) || target 3914 fs/btrfs/volumes.c (bctl_arg->target & ~allowed))); target 3963 fs/btrfs/volumes.c btrfs_bg_type_to_raid_name(bargs->target)); target 4133 fs/btrfs/volumes.c btrfs_bg_type_to_raid_name(bctl->data.target)); target 4140 fs/btrfs/volumes.c btrfs_bg_type_to_raid_name(bctl->meta.target)); target 4147 fs/btrfs/volumes.c btrfs_bg_type_to_raid_name(bctl->sys.target)); target 4167 fs/btrfs/volumes.c !(bctl->sys.target & allowed)) || target 4170 fs/btrfs/volumes.c !(bctl->meta.target & allowed))) target 4177 fs/btrfs/volumes.c bctl->meta.target : fs_info->avail_metadata_alloc_bits; target 4179 
fs/btrfs/volumes.c bctl->data.target : fs_info->avail_data_alloc_bits; target 3590 fs/ceph/caps.c int target, issued; target 3597 fs/ceph/caps.c target = le32_to_cpu(ph->mds); target 3600 fs/ceph/caps.c target = -1; target 3604 fs/ceph/caps.c inode, ci, mds, mseq, target); target 3611 fs/ceph/caps.c if (target < 0) { target 3633 fs/ceph/caps.c tcap = __get_cap_for_mds(ci, target); target 3638 fs/ceph/caps.c dout(" updating import cap %p mds%d\n", tcap, target); target 3680 fs/ceph/caps.c tsession = ceph_mdsc_open_export_target_session(mdsc, target); target 3682 fs/ceph/caps.c if (mds > target) { target 3695 fs/ceph/caps.c target = -1; target 1184 fs/ceph/mds_client.c __open_export_target_session(struct ceph_mds_client *mdsc, int target) target 1188 fs/ceph/mds_client.c session = __ceph_lookup_mds_session(mdsc, target); target 1190 fs/ceph/mds_client.c session = register_session(mdsc, target); target 1202 fs/ceph/mds_client.c ceph_mdsc_open_export_target_session(struct ceph_mds_client *mdsc, int target) target 1206 fs/ceph/mds_client.c dout("open_export_target_session to mds%d\n", target); target 1209 fs/ceph/mds_client.c session = __open_export_target_session(mdsc, target); target 1249 fs/ceph/mds_client.c struct list_head *target) target 1253 fs/ceph/mds_client.c list_splice_init(&session->s_cap_releases, target); target 519 fs/ceph/mds_client.h ceph_mdsc_open_export_target_session(struct ceph_mds_client *mdsc, int target); target 33 fs/cifs/cifs_unicode.c convert_sfu_char(const __u16 src_char, char *target) target 42 fs/cifs/cifs_unicode.c *target = ':'; target 45 fs/cifs/cifs_unicode.c *target = '*'; target 48 fs/cifs/cifs_unicode.c *target = '?'; target 51 fs/cifs/cifs_unicode.c *target = '|'; target 54 fs/cifs/cifs_unicode.c *target = '>'; target 57 fs/cifs/cifs_unicode.c *target = '<'; target 67 fs/cifs/cifs_unicode.c convert_sfm_char(const __u16 src_char, char *target) target 70 fs/cifs/cifs_unicode.c *target = src_char - 0xF000; target 75 fs/cifs/cifs_unicode.c *target = ':'; target 78 fs/cifs/cifs_unicode.c *target = '"'; target 81 fs/cifs/cifs_unicode.c *target = '*'; target 84 fs/cifs/cifs_unicode.c *target = '?'; target 87 fs/cifs/cifs_unicode.c *target = '|'; target 90 fs/cifs/cifs_unicode.c *target = '>'; target 93 fs/cifs/cifs_unicode.c *target = '<'; target 96 fs/cifs/cifs_unicode.c *target = ' '; target 99 fs/cifs/cifs_unicode.c *target = '.'; target 120 fs/cifs/cifs_unicode.c cifs_mapchar(char *target, const __u16 *from, const struct nls_table *cp, target 128 fs/cifs/cifs_unicode.c if ((maptype == SFM_MAP_UNI_RSVD) && convert_sfm_char(src_char, target)) target 131 fs/cifs/cifs_unicode.c convert_sfu_char(src_char, target)) target 135 fs/cifs/cifs_unicode.c len = cp->uni2char(src_char, target, NLS_MAX_CHARSET_SIZE); target 145 fs/cifs/cifs_unicode.c len = utf16s_to_utf8s(from, 3, UTF16_LITTLE_ENDIAN, target, 6); target 151 fs/cifs/cifs_unicode.c *target = '?'; target 460 fs/cifs/cifs_unicode.c cifsConvertToUTF16(__le16 *target, const char *source, int srclen, target 473 fs/cifs/cifs_unicode.c return cifs_strtoUTF16(target, source, PATH_MAX, cp); target 534 fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 539 fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 542 fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 545 fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 548 fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 551 fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 566 
fs/cifs/cifs_unicode.c put_unaligned(dst_char, &target[j]); target 570 fs/cifs/cifs_unicode.c put_unaligned(0, &target[j]); /* Null terminate target unicode string */ target 101 fs/cifs/cifs_unicode.h extern int cifsConvertToUTF16(__le16 *target, const char *source, int maxlen, target 3748 fs/cifs/connect.c static void rfc1002mangle(char *target, char *source, unsigned int length) target 3754 fs/cifs/connect.c target[j] = 'A' + (0x0F & (source[i] >> 4)); target 3755 fs/cifs/connect.c target[j+1] = 'A' + (0x0F & source[i]); target 99 fs/configfs/configfs_internal.h int configfs_create_link(struct configfs_dirent *target, struct dentry *parent, target 354 fs/configfs/dir.c int configfs_create_link(struct configfs_dirent *target, struct dentry *parent, target 362 fs/configfs/dir.c err = configfs_make_dirent(p, dentry, target, mode, CONFIGFS_ITEM_LINK, target 1051 fs/configfs/dir.c struct config_item *target) target 1059 fs/configfs/dir.c if (sd->s_element == target) /* Boo-yah */ target 1067 fs/configfs/dir.c target); target 1081 fs/configfs/dir.c struct config_item *target) target 1088 fs/configfs/dir.c ret = configfs_depend_prep(subsys_dentry, target); target 1096 fs/configfs/dir.c p = target->ci_dentry->d_fsdata; target 1125 fs/configfs/dir.c struct config_item *target) target 1154 fs/configfs/dir.c ret = configfs_do_depend_item(subsys_sd->s_dentry, target); target 1174 fs/configfs/dir.c void configfs_undepend_item(struct config_item *target) target 1184 fs/configfs/dir.c sd = target->ci_dentry->d_fsdata; target 1207 fs/configfs/dir.c struct config_item *target) target 1215 fs/configfs/dir.c if (configfs_is_root(target)) target 1218 fs/configfs/dir.c parent = target->ci_group; target 1224 fs/configfs/dir.c target_subsys = to_configfs_subsystem(to_config_group(target)); target 1256 fs/configfs/dir.c ret = configfs_do_depend_item(subsys_sd->s_dentry, target); target 59 fs/configfs/symlink.c struct config_item *target, char *path) target 65 fs/configfs/symlink.c size = item_path_length(target) + depth * 3 - 1; target 74 fs/configfs/symlink.c fill_item_path(target, path, size); target 120 fs/configfs/symlink.c struct config_item **target, struct super_block *sb) target 127 fs/configfs/symlink.c *target = configfs_get_config_item(path->dentry); target 128 fs/configfs/symlink.c if (!*target) { target 168 fs/crypto/hooks.c int __fscrypt_encrypt_symlink(struct inode *inode, const char *target, target 172 fs/crypto/hooks.c struct qstr iname = QSTR_INIT(target, len); target 204 fs/crypto/hooks.c inode->i_link = kmemdup(target, len + 1, GFP_NOFS); target 2730 fs/dcache.c static void swap_names(struct dentry *dentry, struct dentry *target) target 2732 fs/dcache.c if (unlikely(dname_external(target))) { target 2737 fs/dcache.c swap(target->d_name.name, dentry->d_name.name); target 2743 fs/dcache.c memcpy(target->d_iname, dentry->d_name.name, target 2745 fs/dcache.c dentry->d_name.name = target->d_name.name; target 2746 fs/dcache.c target->d_name.name = target->d_iname; target 2754 fs/dcache.c memcpy(dentry->d_iname, target->d_name.name, target 2755 fs/dcache.c target->d_name.len + 1); target 2756 fs/dcache.c target->d_name.name = dentry->d_name.name; target 2766 fs/dcache.c ((long *) &target->d_iname)[i]); target 2770 fs/dcache.c swap(dentry->d_name.hash_len, target->d_name.hash_len); target 2773 fs/dcache.c static void copy_name(struct dentry *dentry, struct dentry *target) target 2778 fs/dcache.c if (unlikely(dname_external(target))) { target 2779 fs/dcache.c 
atomic_inc(&external_name(target)->u.count); target 2780 fs/dcache.c dentry->d_name = target->d_name; target 2782 fs/dcache.c memcpy(dentry->d_iname, target->d_name.name, target 2783 fs/dcache.c target->d_name.len + 1); target 2785 fs/dcache.c dentry->d_name.hash_len = target->d_name.hash_len; target 2802 fs/dcache.c static void __d_move(struct dentry *dentry, struct dentry *target, target 2810 fs/dcache.c if (WARN_ON(dentry == target)) target 2813 fs/dcache.c BUG_ON(d_ancestor(target, dentry)); target 2815 fs/dcache.c p = d_ancestor(old_parent, target); target 2818 fs/dcache.c spin_lock(&target->d_parent->d_lock); target 2821 fs/dcache.c spin_lock(&target->d_parent->d_lock); target 2826 fs/dcache.c if (p != target) target 2827 fs/dcache.c spin_lock_nested(&target->d_parent->d_lock, target 2831 fs/dcache.c spin_lock_nested(&target->d_lock, 3); target 2833 fs/dcache.c if (unlikely(d_in_lookup(target))) { target 2834 fs/dcache.c dir = target->d_parent->d_inode; target 2836 fs/dcache.c __d_lookup_done(target); target 2840 fs/dcache.c write_seqcount_begin_nested(&target->d_seq, DENTRY_D_LOCK_NESTED); target 2845 fs/dcache.c if (!d_unhashed(target)) target 2846 fs/dcache.c ___d_drop(target); target 2849 fs/dcache.c dentry->d_parent = target->d_parent; target 2851 fs/dcache.c copy_name(dentry, target); target 2852 fs/dcache.c target->d_hash.pprev = NULL; target 2857 fs/dcache.c target->d_parent = old_parent; target 2858 fs/dcache.c swap_names(dentry, target); target 2859 fs/dcache.c list_move(&target->d_child, &target->d_parent->d_subdirs); target 2860 fs/dcache.c __d_rehash(target); target 2861 fs/dcache.c fsnotify_update_flags(target); target 2868 fs/dcache.c write_seqcount_end(&target->d_seq); target 2878 fs/dcache.c spin_unlock(&target->d_lock); target 2891 fs/dcache.c void d_move(struct dentry *dentry, struct dentry *target) target 2894 fs/dcache.c __d_move(dentry, target, false); target 641 fs/debugfs/inode.c const char *target) target 645 fs/debugfs/inode.c char *link = kstrdup(target, GFP_KERNEL); target 980 fs/ecryptfs/inode.c char *target; target 983 fs/ecryptfs/inode.c target = ecryptfs_readlink_lower(dentry, &targetsiz); target 984 fs/ecryptfs/inode.c if (!IS_ERR(target)) { target 985 fs/ecryptfs/inode.c kfree(target); target 988 fs/ecryptfs/inode.c rc = PTR_ERR(target); target 402 fs/ext2/inode.c int target, i; target 416 fs/ext2/inode.c target = blks + indirect_blks; target 419 fs/ext2/inode.c count = target; target 425 fs/ext2/inode.c target -= count; target 1135 fs/ext4/inline.c void *target = dir_block->b_data; target 1141 fs/ext4/inline.c de = (struct ext4_dir_entry_2 *)target; target 1145 fs/ext4/inline.c header_size = (void *)de - target; target 5179 fs/ext4/super.c tid_t target; target 5199 fs/ext4/super.c target = jbd2_get_latest_transaction(sbi->s_journal); target 5201 fs/ext4/super.c !jbd2_trans_will_send_data_barrier(sbi->s_journal, target)) target 5204 fs/ext4/super.c if (jbd2_journal_start_commit(sbi->s_journal, &target)) { target 5207 fs/ext4/super.c target); target 381 fs/f2fs/data.c struct page *target; target 392 fs/f2fs/data.c target = bvec->bv_page; target 393 fs/f2fs/data.c if (fscrypt_is_bounce_page(target)) target 394 fs/f2fs/data.c target = fscrypt_pagecache_page(target); target 396 fs/f2fs/data.c if (inode && inode == target->mapping->host) target 398 fs/f2fs/data.c if (page && page == target) target 400 fs/f2fs/data.c if (ino && ino == ino_of_node(target)) target 1236 fs/f2fs/namei.c const char *target; target 1245 fs/f2fs/namei.c target = 
fscrypt_get_symlink(inode, page_address(page), target 1248 fs/f2fs/namei.c return target; target 161 fs/fsopen.c struct path target; target 181 fs/fsopen.c ret = user_path_at(dfd, path, lookup_flags, &target); target 186 fs/fsopen.c if (target.mnt->mnt_root != target.dentry) target 189 fs/fsopen.c fc = fs_context_for_reconfigure(target.dentry, 0, 0); target 201 fs/fsopen.c path_put(&target); target 207 fs/fsopen.c path_put(&target); target 1084 fs/gfs2/bmap.c ap.target = data_blocks + ind_blocks; target 2111 fs/gfs2/bmap.c struct gfs2_alloc_parms ap = { .target = 1, }; target 492 fs/gfs2/file.c ap.target = data_blocks + ind_blocks; target 1038 fs/gfs2/file.c ap.target = data_blocks + ind_blocks; target 60 fs/gfs2/glock.c static void do_xmote(struct gfs2_glock *gl, struct gfs2_holder *gh, unsigned int target); target 543 fs/gfs2/glock.c static void do_xmote(struct gfs2_glock *gl, struct gfs2_holder *gh, unsigned int target) target 553 fs/gfs2/glock.c target != LM_ST_UNLOCKED) target 557 fs/gfs2/glock.c GLOCK_BUG_ON(gl, gl->gl_state == target); target 559 fs/gfs2/glock.c if ((target == LM_ST_UNLOCKED || target == LM_ST_DEFERRED) && target 564 fs/gfs2/glock.c gl->gl_req = target; target 574 fs/gfs2/glock.c glops->go_inval(gl, target == LM_ST_DEFERRED ? 0 : DIO_METADATA); target 580 fs/gfs2/glock.c ret = sdp->sd_lockstruct.ls_ops->lm_lock(gl, target, lck_flags); target 582 fs/gfs2/glock.c target == LM_ST_UNLOCKED && target 584 fs/gfs2/glock.c finish_xmote(gl, target); target 593 fs/gfs2/glock.c finish_xmote(gl, target); target 324 fs/gfs2/incore.h u64 target; target 375 fs/gfs2/inode.c struct gfs2_alloc_parms ap = { .target = *dblocks, .aflags = flags, }; target 517 fs/gfs2/inode.c struct gfs2_alloc_parms ap = { .target = da->nr_blocks, }; target 962 fs/gfs2/inode.c struct gfs2_alloc_parms ap = { .target = da.nr_blocks, }; target 1512 fs/gfs2/inode.c struct gfs2_alloc_parms ap = { .target = da.nr_blocks, }; target 1895 fs/gfs2/inode.c ap.target = gfs2_get_inode_blocks(&ip->i_inode); target 1914 fs/gfs2/inode.c gfs2_quota_change(ip, -(s64)ap.target, ouid, ogid); target 1915 fs/gfs2/inode.c gfs2_quota_change(ip, ap.target, nuid, ngid); target 922 fs/gfs2/quota.c ap.target = reserved; target 1221 fs/gfs2/quota.c if (limit && limit < (value + (s64)ap->target)) { target 1718 fs/gfs2/quota.c ap.target = blocks; target 1577 fs/gfs2/rgrp.c extlen = max_t(u32, atomic_read(&ip->i_sizehint), ap->target); target 2051 fs/gfs2/rgrp.c if (gfs2_assert_warn(sdp, ap->target)) target 2103 fs/gfs2/rgrp.c (loops == 0 && ap->target > rs->rs_rbm.rgd->rd_extfail_pt)) target 2119 fs/gfs2/rgrp.c if (free_blocks >= ap->target || target 727 fs/gfs2/xattr.c struct gfs2_alloc_parms ap = { .target = blks }; target 481 fs/jbd2/journal.c int __jbd2_log_start_commit(journal_t *journal, tid_t target) target 484 fs/jbd2/journal.c if (journal->j_commit_request == target) target 493 fs/jbd2/journal.c journal->j_running_transaction->t_tid == target) { target 499 fs/jbd2/journal.c journal->j_commit_request = target; target 506 fs/jbd2/journal.c } else if (!tid_geq(journal->j_commit_request, target)) target 513 fs/jbd2/journal.c target, journal->j_running_transaction ? 
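The fs/jbd2/journal.c lines just above show __jbd2_log_start_commit() comparing the already-requested commit tid against the caller's target with tid_geq() and only ever moving j_commit_request forward. A small stand-alone sketch of that pattern follows; journal_model and log_start_commit_model are illustrative stand-ins, and tid_geq() here mirrors the wraparound-safe signed-difference comparison jbd2 relies on, assuming tid_t is a 32-bit unsigned counter.

        /* Illustrative model of the "request a commit up to a target tid"
         * check seen in the fs/jbd2/journal.c entries above.  Not the real
         * journal_t; just the ordering logic. */
        #include <stdio.h>

        typedef unsigned int tid_t;

        /* Signed difference keeps the ordering meaningful across tid wraparound. */
        static int tid_geq(tid_t x, tid_t y)
        {
                int difference = (int)(x - y);
                return difference >= 0;
        }

        struct journal_model {
                tid_t j_commit_request;  /* highest tid a commit was asked for */
        };

        /* Returns 1 if this call actually advanced the commit request. */
        static int log_start_commit_model(struct journal_model *j, tid_t target)
        {
                if (tid_geq(j->j_commit_request, target))
                        return 0;       /* already requested this far or further */
                j->j_commit_request = target;
                return 1;
        }

        int main(void)
        {
                struct journal_model j = { .j_commit_request = 100 };

                printf("%d\n", log_start_commit_model(&j, 102));  /* 1: advanced */
                printf("%d\n", log_start_commit_model(&j, 101));  /* 0: stale target */
                return 0;
        }

The cast to a signed difference is what keeps the comparison sane once the transaction counter wraps past its maximum value.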
target 75 fs/jffs2/dir.c static struct dentry *jffs2_lookup(struct inode *dir_i, struct dentry *target, target 86 fs/jffs2/dir.c if (target->d_name.len > JFFS2_MAX_NAME_LEN) target 92 fs/jffs2/dir.c nhash = full_name_hash(NULL, target->d_name.name, target->d_name.len); target 100 fs/jffs2/dir.c strlen(fd_list->name) == target->d_name.len && target 101 fs/jffs2/dir.c !strncmp(fd_list->name, target->d_name.name, target->d_name.len)) { target 114 fs/jffs2/dir.c return d_splice_alias(inode, target); target 279 fs/jffs2/dir.c static int jffs2_symlink (struct inode *dir_i, struct dentry *dentry, const char *target) target 290 fs/jffs2/dir.c int ret, targetlen = strlen(target); target 334 fs/jffs2/dir.c ri->data_crc = cpu_to_je32(crc32(0, target, targetlen)); target 337 fs/jffs2/dir.c fn = jffs2_write_dnode(c, f, ri, target, targetlen, ALLOC_NORMAL); target 350 fs/jffs2/dir.c f->target = kmemdup(target, targetlen + 1, GFP_KERNEL); target 351 fs/jffs2/dir.c if (!f->target) { target 358 fs/jffs2/dir.c inode->i_link = f->target; target 361 fs/jffs2/dir.c __func__, (char *)f->target); target 296 fs/jffs2/fs.c inode->i_link = f->target; target 46 fs/jffs2/jffs2_fs_i.h unsigned char *target; target 57 fs/jffs2/os-linux.h f->target = NULL; target 1255 fs/jffs2/readinode.c f->target = kmalloc(csize + 1, GFP_KERNEL); target 1256 fs/jffs2/readinode.c if (!f->target) { target 1262 fs/jffs2/readinode.c csize, &retlen, (char *)f->target); target 1267 fs/jffs2/readinode.c kfree(f->target); target 1268 fs/jffs2/readinode.c f->target = NULL; target 1272 fs/jffs2/readinode.c f->target[csize] = '\0'; target 1273 fs/jffs2/readinode.c dbg_readinode("symlink's target '%s' cached\n", f->target); target 52 fs/jffs2/super.c kfree(f->target); target 106 fs/jffs2/xattr.c uint32_t target, before; target 114 fs/jffs2/xattr.c target = c->xdatum_mem_usage * 4 / 5; /* 20% reduction */ target 122 fs/jffs2/xattr.c if (c->xdatum_mem_usage <= target) target 1519 fs/jfs/jfs_logmgr.c struct tblock *target = NULL; target 1534 fs/jfs/jfs_logmgr.c target = list_entry(log->cqueue.prev, struct tblock, cqueue); target 1544 fs/jfs/jfs_logmgr.c log->flush_tblk = target; target 1547 fs/jfs/jfs_logmgr.c log->flush_tblk = target; target 1565 fs/jfs/jfs_logmgr.c if (wait && target && !(target->flag & tblkGC_COMMITTED)) { target 1568 fs/jfs/jfs_logmgr.c add_wait_queue(&target->gcwait, &__wait); target 1573 fs/jfs/jfs_logmgr.c remove_wait_queue(&target->gcwait, &__wait); target 27 fs/kernfs/symlink.c struct kernfs_node *target) target 34 fs/kernfs/symlink.c if (target->iattr) { target 35 fs/kernfs/symlink.c uid = target->iattr->ia_uid; target 36 fs/kernfs/symlink.c gid = target->iattr->ia_gid; target 45 fs/kernfs/symlink.c kn->ns = target->ns; target 46 fs/kernfs/symlink.c kn->symlink.target_kn = target; target 47 fs/kernfs/symlink.c kernfs_get(target); /* ref owned by symlink */ target 58 fs/kernfs/symlink.c struct kernfs_node *target, char *path) target 67 fs/kernfs/symlink.c kn = target->parent; target 83 fs/kernfs/symlink.c kn = target; target 97 fs/kernfs/symlink.c kn = target; target 116 fs/kernfs/symlink.c struct kernfs_node *target = kn->symlink.target_kn; target 120 fs/kernfs/symlink.c error = kernfs_get_target_path(parent, target, path); target 3976 fs/namei.c struct inode *target = dentry->d_inode; target 3985 fs/namei.c inode_lock(target); target 3991 fs/namei.c error = try_break_deleg(target, delegated_inode); target 4003 fs/namei.c inode_unlock(target); target 4007 fs/namei.c fsnotify_link_count(target); target 4393 fs/namei.c 
struct inode *target = new_dentry->d_inode; target 4398 fs/namei.c if (source == target) target 4405 fs/namei.c if (!target) { target 4432 fs/namei.c error = inode_permission(target, MAY_WRITE); target 4446 fs/namei.c lock_two_nondirectories(source, target); target 4447 fs/namei.c else if (target) target 4448 fs/namei.c inode_lock(target); target 4467 fs/namei.c if (target && !new_is_dir) { target 4468 fs/namei.c error = try_break_deleg(target, delegated_inode); target 4477 fs/namei.c if (!(flags & RENAME_EXCHANGE) && target) { target 4480 fs/namei.c target->i_flags |= S_DEAD; target 4493 fs/namei.c unlock_two_nondirectories(source, target); target 4494 fs/namei.c else if (target) target 4495 fs/namei.c inode_unlock(target); target 4499 fs/namei.c !(flags & RENAME_EXCHANGE) ? target : NULL, old_dentry); target 827 fs/nfs/client.c void nfs_server_copy_userdata(struct nfs_server *target, struct nfs_server *source) target 829 fs/nfs/client.c target->flags = source->flags; target 830 fs/nfs/client.c target->rsize = source->rsize; target 831 fs/nfs/client.c target->wsize = source->wsize; target 832 fs/nfs/client.c target->acregmin = source->acregmin; target 833 fs/nfs/client.c target->acregmax = source->acregmax; target 834 fs/nfs/client.c target->acdirmin = source->acdirmin; target 835 fs/nfs/client.c target->acdirmax = source->acdirmax; target 836 fs/nfs/client.c target->caps = source->caps; target 837 fs/nfs/client.c target->options = source->options; target 838 fs/nfs/client.c target->auth_info = source->auth_info; target 839 fs/nfs/client.c target->port = source->port; target 2082 fs/nfsd/nfs4state.c static void copy_verf(struct nfs4_client *target, nfs4_verifier *source) target 2084 fs/nfsd/nfs4state.c memcpy(target->cl_verifier.data, source->data, target 2085 fs/nfsd/nfs4state.c sizeof(target->cl_verifier.data)); target 2088 fs/nfsd/nfs4state.c static void copy_clid(struct nfs4_client *target, struct nfs4_client *source) target 2090 fs/nfsd/nfs4state.c target->cl_clientid.cl_boot = source->cl_clientid.cl_boot; target 2091 fs/nfsd/nfs4state.c target->cl_clientid.cl_id = source->cl_clientid.cl_id; target 2094 fs/nfsd/nfs4state.c static int copy_cred(struct svc_cred *target, struct svc_cred *source) target 2096 fs/nfsd/nfs4state.c target->cr_principal = kstrdup(source->cr_principal, GFP_KERNEL); target 2097 fs/nfsd/nfs4state.c target->cr_raw_principal = kstrdup(source->cr_raw_principal, target 2099 fs/nfsd/nfs4state.c target->cr_targ_princ = kstrdup(source->cr_targ_princ, GFP_KERNEL); target 2100 fs/nfsd/nfs4state.c if ((source->cr_principal && !target->cr_principal) || target 2101 fs/nfsd/nfs4state.c (source->cr_raw_principal && !target->cr_raw_principal) || target 2102 fs/nfsd/nfs4state.c (source->cr_targ_princ && !target->cr_targ_princ)) target 2105 fs/nfsd/nfs4state.c target->cr_flavor = source->cr_flavor; target 2106 fs/nfsd/nfs4state.c target->cr_uid = source->cr_uid; target 2107 fs/nfsd/nfs4state.c target->cr_gid = source->cr_gid; target 2108 fs/nfsd/nfs4state.c target->cr_group_info = source->cr_group_info; target 2109 fs/nfsd/nfs4state.c get_group_info(target->cr_group_info); target 2110 fs/nfsd/nfs4state.c target->cr_gss_mech = source->cr_gss_mech; target 567 fs/nfsd/nfssvc.c unsigned long long target; target 571 fs/nfsd/nfssvc.c target = (i.totalram - i.totalhigh) << PAGE_SHIFT; target 577 fs/nfsd/nfssvc.c target >>= 12; target 580 fs/nfsd/nfssvc.c while (ret > target && ret >= 8*1024*2) target 381 fs/nilfs2/alloc.c unsigned long target, target 387 fs/nilfs2/alloc.c if 
(likely(target < bsize)) { target 388 fs/nilfs2/alloc.c pos = target; target 397 fs/nilfs2/alloc.c end = target; target 6114 fs/ocfs2/alloc.c tid_t target; target 6135 fs/ocfs2/alloc.c if (jbd2_journal_start_commit(osb->journal->j_journal, &target)) { target 6136 fs/ocfs2/alloc.c jbd2_log_wait_commit(osb->journal->j_journal, target); target 353 fs/ocfs2/cluster/heartbeat.c static int o2hb_send_nego_msg(int key, int type, u8 target) target 361 fs/ocfs2/cluster/heartbeat.c target, &status); target 442 fs/ocfs2/dcache.c void ocfs2_dentry_move(struct dentry *dentry, struct dentry *target, target 466 fs/ocfs2/dcache.c d_move(dentry, target); target 39 fs/ocfs2/dcache.h void ocfs2_dentry_move(struct dentry *dentry, struct dentry *target, target 349 fs/ocfs2/dlm/dlmdomain.c struct dlm_ctxt *target; target 354 fs/ocfs2/dlm/dlmdomain.c list_for_each_entry(target, &dlm_domains, list) { target 355 fs/ocfs2/dlm/dlmdomain.c if (target == dlm) { target 356 fs/ocfs2/dlm/dlmdomain.c __dlm_get(target); target 357 fs/ocfs2/dlm/dlmdomain.c ret = target; target 112 fs/ocfs2/dlm/dlmmaster.c u8 target); target 2543 fs/ocfs2/dlm/dlmmaster.c struct dlm_lock_resource *res, u8 target) target 2557 fs/ocfs2/dlm/dlmmaster.c BUG_ON(target == O2NM_MAX_NODES); target 2563 fs/ocfs2/dlm/dlmmaster.c target); target 2587 fs/ocfs2/dlm/dlmmaster.c namelen, target, dlm->node_num); target 2608 fs/ocfs2/dlm/dlmmaster.c if (dlm_mark_lockres_migrating(dlm, res, target) < 0) { target 2611 fs/ocfs2/dlm/dlmmaster.c res->lockname.name, target); target 2652 fs/ocfs2/dlm/dlmmaster.c ret = dlm_send_one_lockres(dlm, res, mres, target, target 2657 fs/ocfs2/dlm/dlmmaster.c target, ret); target 2667 fs/ocfs2/dlm/dlmmaster.c dlm_wait_for_node_death(dlm, target, target 2691 fs/ocfs2/dlm/dlmmaster.c res->owner == target) target 2698 fs/ocfs2/dlm/dlmmaster.c if (dlm_is_node_dead(dlm, target)) { target 2702 fs/ocfs2/dlm/dlmmaster.c res->lockname.name, target); target 2721 fs/ocfs2/dlm/dlmmaster.c dlm_set_lockres_owner(dlm, res, target); target 2750 fs/ocfs2/dlm/dlmmaster.c name, target, ret); target 2770 fs/ocfs2/dlm/dlmmaster.c u8 target = O2NM_MAX_NODES; target 2776 fs/ocfs2/dlm/dlmmaster.c target = dlm_pick_migration_target(dlm, res); target 2779 fs/ocfs2/dlm/dlmmaster.c if (target == O2NM_MAX_NODES) target 2785 fs/ocfs2/dlm/dlmmaster.c ret = dlm_migrate_lockres(dlm, res, target); target 2789 fs/ocfs2/dlm/dlmmaster.c target, ret); target 2837 fs/ocfs2/dlm/dlmmaster.c u8 target) target 2843 fs/ocfs2/dlm/dlmmaster.c target); target 2873 fs/ocfs2/dlm/dlmmaster.c dlm_migration_can_proceed(dlm, res, target), target 2878 fs/ocfs2/dlm/dlmmaster.c test_bit(target, dlm->domain_map) ? "no":"yes"); target 2882 fs/ocfs2/dlm/dlmmaster.c test_bit(target, dlm->domain_map) ? 
"no":"yes"); target 2884 fs/ocfs2/dlm/dlmmaster.c if (!dlm_migration_can_proceed(dlm, res, target)) { target 2892 fs/ocfs2/dlm/dlmmaster.c if (!test_bit(target, dlm->domain_map)) { target 2894 fs/ocfs2/dlm/dlmmaster.c target); target 347 fs/ocfs2/dlm/dlmthread.c struct dlm_lock *lock, *target; target 368 fs/ocfs2/dlm/dlmthread.c target = list_entry(res->converting.next, struct dlm_lock, list); target 369 fs/ocfs2/dlm/dlmthread.c if (target->ml.convert_type == LKM_IVMODE) { target 375 fs/ocfs2/dlm/dlmthread.c if (lock==target) target 378 fs/ocfs2/dlm/dlmthread.c target->ml.convert_type)) { target 386 fs/ocfs2/dlm/dlmthread.c if (lock->ml.highest_blocked < target->ml.convert_type) target 388 fs/ocfs2/dlm/dlmthread.c target->ml.convert_type; target 393 fs/ocfs2/dlm/dlmthread.c if (lock==target) target 396 fs/ocfs2/dlm/dlmthread.c target->ml.convert_type)) { target 402 fs/ocfs2/dlm/dlmthread.c if (lock->ml.highest_blocked < target->ml.convert_type) target 404 fs/ocfs2/dlm/dlmthread.c target->ml.convert_type; target 410 fs/ocfs2/dlm/dlmthread.c spin_lock(&target->spinlock); target 411 fs/ocfs2/dlm/dlmthread.c BUG_ON(target->ml.highest_blocked != LKM_IVMODE); target 416 fs/ocfs2/dlm/dlmthread.c dlm_get_lock_cookie_node(be64_to_cpu(target->ml.cookie)), target 417 fs/ocfs2/dlm/dlmthread.c dlm_get_lock_cookie_seq(be64_to_cpu(target->ml.cookie)), target 418 fs/ocfs2/dlm/dlmthread.c target->ml.type, target 419 fs/ocfs2/dlm/dlmthread.c target->ml.convert_type, target->ml.node); target 421 fs/ocfs2/dlm/dlmthread.c target->ml.type = target->ml.convert_type; target 422 fs/ocfs2/dlm/dlmthread.c target->ml.convert_type = LKM_IVMODE; target 423 fs/ocfs2/dlm/dlmthread.c list_move_tail(&target->list, &res->granted); target 425 fs/ocfs2/dlm/dlmthread.c BUG_ON(!target->lksb); target 426 fs/ocfs2/dlm/dlmthread.c target->lksb->status = DLM_NORMAL; target 428 fs/ocfs2/dlm/dlmthread.c spin_unlock(&target->spinlock); target 431 fs/ocfs2/dlm/dlmthread.c __dlm_queue_ast(dlm, target); target 439 fs/ocfs2/dlm/dlmthread.c target = list_entry(res->blocked.next, struct dlm_lock, list); target 442 fs/ocfs2/dlm/dlmthread.c if (lock==target) target 444 fs/ocfs2/dlm/dlmthread.c if (!dlm_lock_compatible(lock->ml.type, target->ml.type)) { target 450 fs/ocfs2/dlm/dlmthread.c if (lock->ml.highest_blocked < target->ml.type) target 451 fs/ocfs2/dlm/dlmthread.c lock->ml.highest_blocked = target->ml.type; target 456 fs/ocfs2/dlm/dlmthread.c if (lock==target) target 458 fs/ocfs2/dlm/dlmthread.c if (!dlm_lock_compatible(lock->ml.type, target->ml.type)) { target 464 fs/ocfs2/dlm/dlmthread.c if (lock->ml.highest_blocked < target->ml.type) target 465 fs/ocfs2/dlm/dlmthread.c lock->ml.highest_blocked = target->ml.type; target 472 fs/ocfs2/dlm/dlmthread.c spin_lock(&target->spinlock); target 473 fs/ocfs2/dlm/dlmthread.c BUG_ON(target->ml.highest_blocked != LKM_IVMODE); target 478 fs/ocfs2/dlm/dlmthread.c dlm_get_lock_cookie_node(be64_to_cpu(target->ml.cookie)), target 479 fs/ocfs2/dlm/dlmthread.c dlm_get_lock_cookie_seq(be64_to_cpu(target->ml.cookie)), target 480 fs/ocfs2/dlm/dlmthread.c target->ml.type, target->ml.node); target 483 fs/ocfs2/dlm/dlmthread.c list_move_tail(&target->list, &res->granted); target 485 fs/ocfs2/dlm/dlmthread.c BUG_ON(!target->lksb); target 486 fs/ocfs2/dlm/dlmthread.c target->lksb->status = DLM_NORMAL; target 488 fs/ocfs2/dlm/dlmthread.c spin_unlock(&target->spinlock); target 491 fs/ocfs2/dlm/dlmthread.c __dlm_queue_ast(dlm, target); target 241 fs/ocfs2/stack_user.c static int ocfs2_control_cfu(void *target, 
size_t target_len, target 249 fs/ocfs2/stack_user.c if (copy_from_user(target, buf, target_len)) target 398 fs/ocfs2/super.c tid_t target; target 413 fs/ocfs2/super.c &target)) { target 416 fs/ocfs2/super.c target); target 4198 fs/ocfs2/xattr.c char *target = bucket_block(bucket, blks - 1); target 4218 fs/ocfs2/xattr.c memcpy(target + offset, src + offset, size); target 4227 fs/ocfs2/xattr.c target = bucket_block(bucket, 0); target 4230 fs/ocfs2/xattr.c memcpy(target + offset, (char *)xb_xh + offset, size); target 4240 fs/ocfs2/xattr.c sort(target + offset, count, sizeof(struct ocfs2_xattr_entry), target 4510 fs/ocfs2/xattr.c struct ocfs2_xattr_bucket *target, target 4546 fs/ocfs2/xattr.c if (bucket_blkno(target) >= src_blkno) { target 4549 fs/ocfs2/xattr.c (bucket_blkno(target) - src_blkno); target 4552 fs/ocfs2/xattr.c ocfs2_xattr_bucket_relse(target); target 4563 fs/ocfs2/xattr.c ret = ocfs2_read_xattr_bucket(target, src_blkno); target 5030 fs/ocfs2/xattr.c struct ocfs2_xattr_bucket *target, target 5045 fs/ocfs2/xattr.c first, target, target 5057 fs/ocfs2/xattr.c if (prev_clusters > 1 && bucket_blkno(target) != last_blk) { target 5071 fs/ocfs2/xattr.c if ((bucket_blkno(target) == last_blk) && extend) target 5098 fs/ocfs2/xattr.c struct ocfs2_xattr_bucket *target, target 5158 fs/ocfs2/xattr.c target, target 5271 fs/ocfs2/xattr.c struct ocfs2_xattr_bucket *target, target 5279 fs/ocfs2/xattr.c le32_to_cpu(bucket_xh(target)->xh_entries[0].xe_name_hash); target 5288 fs/ocfs2/xattr.c (unsigned long long)bucket_blkno(target)); target 5320 fs/ocfs2/xattr.c target, target 5335 fs/ocfs2/xattr.c bucket_blkno(target), target 251 fs/orangefs/namei.c strncpy(new_op->upcall.req.sym.target, symname, ORANGEFS_NAME_MAX - 1); target 44 fs/orangefs/upcall.h char target[ORANGEFS_NAME_MAX]; target 208 fs/proc/vmcore.c static int copy_to(void *target, void *src, size_t size, int userbuf) target 211 fs/proc/vmcore.c if (copy_to_user((char __user *) target, src, size)) target 214 fs/proc/vmcore.c memcpy(target, src, size); target 116 fs/squashfs/xattr.c char *target = kmalloc(name_len, GFP_KERNEL); target 118 fs/squashfs/xattr.c if (target == NULL) target 137 fs/squashfs/xattr.c err = squashfs_read_metadata(sb, target, &start, target 146 fs/squashfs/xattr.c strncmp(target, name, name_size) == 0) { target 198 fs/squashfs/xattr.c kfree(target); target 382 fs/sysfs/group.c struct kobject *target, const char *link_name) target 391 fs/sysfs/group.c error = sysfs_create_link_sd(parent, target, link_name); target 428 fs/sysfs/group.c struct kernfs_node *target; target 438 fs/sysfs/group.c target = target_kobj->sd; target 439 fs/sysfs/group.c if (target) target 440 fs/sysfs/group.c kernfs_get(target); target 442 fs/sysfs/group.c if (!target) target 447 fs/sysfs/group.c kernfs_put(target); target 456 fs/sysfs/group.c kernfs_put(target); target 24 fs/sysfs/symlink.c struct kernfs_node *kn, *target = NULL; target 36 fs/sysfs/symlink.c target = target_kobj->sd; target 37 fs/sysfs/symlink.c kernfs_get(target); target 41 fs/sysfs/symlink.c if (!target) target 44 fs/sysfs/symlink.c kn = kernfs_create_link(parent, name, target); target 45 fs/sysfs/symlink.c kernfs_put(target); target 61 fs/sysfs/symlink.c int sysfs_create_link_sd(struct kernfs_node *kn, struct kobject *target, target 64 fs/sysfs/symlink.c return sysfs_do_create_link_sd(kn, target, name, 1); target 67 fs/sysfs/symlink.c static int sysfs_do_create_link(struct kobject *kobj, struct kobject *target, target 80 fs/sysfs/symlink.c return 
sysfs_do_create_link_sd(parent, target, name, warn); target 89 fs/sysfs/symlink.c int sysfs_create_link(struct kobject *kobj, struct kobject *target, target 92 fs/sysfs/symlink.c return sysfs_do_create_link(kobj, target, name, 1); target 105 fs/sysfs/symlink.c int sysfs_create_link_nowarn(struct kobject *kobj, struct kobject *target, target 108 fs/sysfs/symlink.c return sysfs_do_create_link(kobj, target, name, 0); target 38 fs/sysfs/sysfs.h int sysfs_create_link_sd(struct kernfs_node *kn, struct kobject *target, target 489 fs/ubifs/auth.c struct shash_desc *target) target 502 fs/ubifs/auth.c err = crypto_shash_import(target, state); target 1689 fs/ubifs/ubifs.h struct shash_desc *target); target 1692 fs/ubifs/ubifs.h struct shash_desc *target) target 1695 fs/ubifs/ubifs.h return __ubifs_shash_copy_state(c, src, target); target 203 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 230 fs/xfs/xfs_buf.c bp->b_target = target; target 231 fs/xfs/xfs_buf.c bp->b_mount = target->bt_mount; target 665 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 674 fs/xfs/xfs_buf.c error = xfs_buf_find(target, &map, 1, flags, NULL, &bp); target 687 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 696 fs/xfs/xfs_buf.c error = xfs_buf_find(target, map, nmaps, flags, NULL, &bp); target 718 fs/xfs/xfs_buf.c new_bp = _xfs_buf_alloc(target, map, nmaps, flags); target 728 fs/xfs/xfs_buf.c error = xfs_buf_find(target, map, nmaps, flags, new_bp, &bp); target 741 fs/xfs/xfs_buf.c xfs_warn(target->bt_mount, target 755 fs/xfs/xfs_buf.c XFS_STATS_INC(target->bt_mount, xb_get); target 811 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 821 fs/xfs/xfs_buf.c bp = xfs_buf_get_map(target, map, nmaps, flags); target 828 fs/xfs/xfs_buf.c XFS_STATS_INC(target->bt_mount, xb_get_read); target 857 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 862 fs/xfs/xfs_buf.c if (bdi_read_congested(target->bt_bdev->bd_bdi)) target 865 fs/xfs/xfs_buf.c xfs_buf_read_map(target, map, nmaps, target 875 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 886 fs/xfs/xfs_buf.c bp = xfs_buf_get_uncached(target, numblks, flags); target 910 fs/xfs/xfs_buf.c struct xfs_buftarg *target, target 920 fs/xfs/xfs_buf.c bp = _xfs_buf_alloc(target, &map, 1, flags & XBF_NO_IOACCT); target 938 fs/xfs/xfs_buf.c xfs_warn(target->bt_mount, target 191 fs/xfs/xfs_buf.h struct xfs_buf *xfs_buf_incore(struct xfs_buftarg *target, target 195 fs/xfs/xfs_buf.h struct xfs_buf *xfs_buf_get_map(struct xfs_buftarg *target, target 198 fs/xfs/xfs_buf.h struct xfs_buf *xfs_buf_read_map(struct xfs_buftarg *target, target 202 fs/xfs/xfs_buf.h void xfs_buf_readahead_map(struct xfs_buftarg *target, target 208 fs/xfs/xfs_buf.h struct xfs_buftarg *target, target 213 fs/xfs/xfs_buf.h return xfs_buf_get_map(target, &map, 1, 0); target 218 fs/xfs/xfs_buf.h struct xfs_buftarg *target, target 225 fs/xfs/xfs_buf.h return xfs_buf_read_map(target, &map, 1, flags, ops); target 230 fs/xfs/xfs_buf.h struct xfs_buftarg *target, target 236 fs/xfs/xfs_buf.h return xfs_buf_readahead_map(target, &map, 1, ops); target 239 fs/xfs/xfs_buf.h struct xfs_buf *xfs_buf_get_uncached(struct xfs_buftarg *target, size_t numblks, target 241 fs/xfs/xfs_buf.h int xfs_buf_read_uncached(struct xfs_buftarg *target, xfs_daddr_t daddr, target 489 fs/xfs/xfs_file.c struct xfs_buftarg *target = XFS_IS_REALTIME_INODE(ip) ? 
target 493 fs/xfs/xfs_file.c if ((iocb->ki_pos | count) & target->bt_logical_sectormask) target 2142 fs/xfs/xfs_ioctl.c xfs_buftarg_t *target = target 2146 fs/xfs/xfs_ioctl.c da.d_mem = da.d_miniosz = target->bt_logical_sectorsize; target 173 fs/xfs/xfs_trans.h struct xfs_buftarg *target, target 180 fs/xfs/xfs_trans.h struct xfs_buftarg *target, target 186 fs/xfs/xfs_trans.h return xfs_trans_get_buf_map(tp, target, &map, 1, flags); target 191 fs/xfs/xfs_trans.h struct xfs_buftarg *target, target 201 fs/xfs/xfs_trans.h struct xfs_buftarg *target, target 209 fs/xfs/xfs_trans.h return xfs_trans_read_buf_map(mp, tp, target, &map, 1, target 370 fs/xfs/xfs_trans_ail.c xfs_lsn_t target; target 394 fs/xfs/xfs_trans_ail.c target = ailp->ail_target; target 395 fs/xfs/xfs_trans_ail.c ailp->ail_target_prev = target; target 411 fs/xfs/xfs_trans_ail.c while ((XFS_LSN_CMP(lip->li_lsn, target) <= 0)) { target 494 fs/xfs/xfs_trans_ail.c if (!count || XFS_LSN_CMP(lsn, target) >= 0) { target 25 fs/xfs/xfs_trans_buf.c struct xfs_buftarg *target, target 40 fs/xfs/xfs_trans_buf.c blip->bli_buf->b_target == target && target 118 fs/xfs/xfs_trans_buf.c struct xfs_buftarg *target, target 127 fs/xfs/xfs_trans_buf.c return xfs_buf_get_map(target, map, nmaps, flags); target 135 fs/xfs/xfs_trans_buf.c bp = xfs_trans_buf_item_match(tp, target, map, nmaps); target 152 fs/xfs/xfs_trans_buf.c bp = xfs_buf_get_map(target, map, nmaps, flags); target 226 fs/xfs/xfs_trans_buf.c struct xfs_buftarg *target, target 247 fs/xfs/xfs_trans_buf.c bp = xfs_trans_buf_item_match(tp, target, map, nmaps); target 301 fs/xfs/xfs_trans_buf.c bp = xfs_buf_read_map(target, map, nmaps, flags, ops); target 490 include/acpi/actypes.h #define ACPI_SET_BIT(target,bit) ((target) |= (bit)) target 491 include/acpi/actypes.h #define ACPI_CLEAR_BIT(target,bit) ((target) &= ~(bit)) target 145 include/drm/drm_atomic_helper.h uint32_t target, target 586 include/drm/drm_crtc.h uint32_t flags, uint32_t target, target 11 include/linux/atalk.h struct atalk_addr target; target 77 include/linux/btree-128.h static inline int btree_merge128(struct btree_head128 *target, target 81 include/linux/btree-128.h return btree_merge(&target->h, &victim->h, &btree_geo128, gfp); target 30 include/linux/btree-type.h static inline int BTREE_FN(merge)(BTREE_TYPE_HEAD *target, target 34 include/linux/btree-type.h return btree_merge(&target->h, &victim->h, BTREE_TYPE_GEO, gfp); target 158 include/linux/btree.h int btree_merge(struct btree_head *target, struct btree_head *victim, target 214 include/linux/configfs.h int (*allow_link)(struct config_item *src, struct config_item *target); target 215 include/linux/configfs.h void (*drop_link)(struct config_item *src, struct config_item *target); target 256 include/linux/configfs.h struct config_item *target); target 257 include/linux/configfs.h void configfs_undepend_item(struct config_item *target); target 267 include/linux/configfs.h struct config_item *target); target 270 include/linux/configfs.h static inline void configfs_undepend_item_unlocked(struct config_item *target) target 272 include/linux/configfs.h configfs_undepend_item(target); target 308 include/linux/cpufreq.h int (*target)(struct cpufreq_policy *policy, target 99 include/linux/devfreq.h int (*target)(struct device *dev, unsigned long *freq, u32 flags); target 44 include/linux/device-mapper.h typedef int (*dm_ctr_fn) (struct dm_target *target, target 266 include/linux/fscrypt.h extern int __fscrypt_encrypt_symlink(struct inode *inode, const char *target, target 539 
include/linux/fscrypt.h const char *target, target 729 include/linux/fscrypt.h const char *target, target 737 include/linux/fscrypt.h disk_link->name = (unsigned char *)target; target 760 include/linux/fscrypt.h const char *target, target 765 include/linux/fscrypt.h return __fscrypt_encrypt_symlink(inode, target, len, disk_link); target 106 include/linux/fsnotify.h int isdir, struct inode *target, target 130 include/linux/fsnotify.h if (target) target 131 include/linux/fsnotify.h fsnotify_link_count(target); target 624 include/linux/genhd.h extern int disk_expand_part_tbl(struct gendisk *disk, int target); target 178 include/linux/host1x.h } target; target 124 include/linux/jump_label.h s32 target; target 135 include/linux/jump_label.h return (unsigned long)&entry->target + entry->target; target 154 include/linux/jump_label.h return entry->target; target 359 include/linux/kernfs.h struct kernfs_node *target); target 456 include/linux/kernfs.h struct kernfs_node *target) target 790 include/linux/kvm_host.h int kvm_vcpu_yield_to(struct kvm_vcpu *target); target 1471 include/linux/lsm_hooks.h int (*capget)(struct task_struct *target, kernel_cap_t *effective, target 1669 include/linux/lsm_hooks.h struct task_struct *target, long type, target 60 include/linux/mbus.h int mvebu_mbus_get_io_win_info(phys_addr_t phyaddr, u32 *size, u8 *target, target 72 include/linux/mbus.h u8 *target, u8 *attr) target 89 include/linux/mbus.h int mvebu_mbus_get_dram_win_info(phys_addr_t phyaddr, u8 *target, u8 *attr); target 90 include/linux/mbus.h int mvebu_mbus_add_window_remap_by_id(unsigned int target, target 94 include/linux/mbus.h int mvebu_mbus_add_window_by_id(unsigned int target, unsigned int attribute, target 102 include/linux/mbus.h static inline int mvebu_mbus_get_dram_win_info(phys_addr_t phyaddr, u8 *target, target 292 include/linux/module.h struct module *source, *target; target 76 include/linux/mtd/nand.h unsigned int target; target 466 include/linux/mtd/nand.h pos->target = tmp; target 483 include/linux/mtd/nand.h if (a->target != b->target) target 484 include/linux/mtd/nand.h return a->target < b->target ? 
-1 : 1; target 517 include/linux/mtd/nand.h (pos->target * nand->memorg.luns_per_target)) * target 557 include/linux/mtd/nand.h pos->target++; target 744 include/linux/mtd/nand.h ((pos->lun + (pos->target * nand->memorg.luns_per_target)) * target 305 include/linux/mtd/spinand.h unsigned int target); target 383 include/linux/mtd/spinand.h unsigned int target); target 459 include/linux/mtd/spinand.h int spinand_select_target(struct spinand_device *spinand, unsigned int target); target 124 include/linux/netfilter/ipset/ip_set.h bool target; target 693 include/linux/netfilter/ipset/ip_set.h { .bytes = (skb)->len, .packets = 1, .target = true,\ target 33 include/linux/netfilter/x_tables.h const struct xt_target *target; target 127 include/linux/netfilter/x_tables.h const struct xt_target *target; target 137 include/linux/netfilter/x_tables.h const struct xt_target *target; target 191 include/linux/netfilter/x_tables.h unsigned int (*target)(struct sk_buff *skb, target 270 include/linux/netfilter/x_tables.h int xt_register_target(struct xt_target *target); target 271 include/linux/netfilter/x_tables.h void xt_unregister_target(struct xt_target *target); target 272 include/linux/netfilter/x_tables.h int xt_register_targets(struct xt_target *target, unsigned int n); target 273 include/linux/netfilter/x_tables.h void xt_unregister_targets(struct xt_target *target, unsigned int n); target 275 include/linux/netfilter/x_tables.h int xt_register_match(struct xt_match *target); target 276 include/linux/netfilter/x_tables.h void xt_unregister_match(struct xt_match *target); target 288 include/linux/netfilter/x_tables.h unsigned int target, unsigned int size); target 322 include/linux/netfilter/x_tables.h int xt_find_revision(u8 af, const char *name, u8 revision, int target, target 479 include/linux/netfilter/x_tables.h compat_uptr_t target; target 523 include/linux/netfilter/x_tables.h int xt_compat_target_offset(const struct xt_target *target); target 21 include/linux/netfilter_arp/arp_tables.h struct xt_standard_target target; target 26 include/linux/netfilter_arp/arp_tables.h struct xt_error_target target; target 38 include/linux/netfilter_arp/arp_tables.h .target = XT_TARGET_INIT(XT_STANDARD_TARGET, \ target 40 include/linux/netfilter_arp/arp_tables.h .target.verdict = -(__verdict) - 1, \ target 46 include/linux/netfilter_arp/arp_tables.h .target = XT_TARGET_INIT(XT_ERROR_TARGET, \ target 48 include/linux/netfilter_arp/arp_tables.h .target.errorname = "ERROR", \ target 40 include/linux/netfilter_bridge/ebtables.h unsigned int (*target)(struct sk_buff *skb, target 42 include/linux/netfilter_bridge/ebtables.h unsigned int hook_num, const struct xt_target *target, target 45 include/linux/netfilter_bridge/ebtables.h const struct xt_target *target, void *targinfo, target 47 include/linux/netfilter_bridge/ebtables.h void (*destroy)(const struct xt_target *target, void *targinfo); target 58 include/linux/netfilter_bridge/ebtables.h unsigned int (*target)(struct sk_buff *skb, target 60 include/linux/netfilter_bridge/ebtables.h unsigned int hook_num, const struct xt_target *target, target 63 include/linux/netfilter_bridge/ebtables.h const struct xt_target *target, void *targinfo, target 65 include/linux/netfilter_bridge/ebtables.h void (*destroy)(const struct xt_target *target, void *targinfo); target 125 include/linux/netfilter_bridge/ebtables.h static inline bool ebt_invalid_target(int target) target 127 include/linux/netfilter_bridge/ebtables.h return (target < -NUM_STANDARD_TARGETS || target >= 0); target 
34 include/linux/netfilter_ipv4/ip_tables.h struct xt_standard_target target; target 39 include/linux/netfilter_ipv4/ip_tables.h struct xt_error_target target; target 51 include/linux/netfilter_ipv4/ip_tables.h .target = XT_TARGET_INIT(XT_STANDARD_TARGET, \ target 53 include/linux/netfilter_ipv4/ip_tables.h .target.verdict = -(__verdict) - 1, \ target 59 include/linux/netfilter_ipv4/ip_tables.h .target = XT_TARGET_INIT(XT_ERROR_TARGET, \ target 61 include/linux/netfilter_ipv4/ip_tables.h .target.errorname = "ERROR", \ target 33 include/linux/nfs.h static inline void nfs_copy_fh(struct nfs_fh *target, const struct nfs_fh *source) target 35 include/linux/nfs.h target->size = source->size; target 36 include/linux/nfs.h memcpy(target->data, source->data, source->size); target 557 include/linux/of.h struct device_node **target, u32 *id_out); target 975 include/linux/of.h struct device_node **target, u32 *id_out) target 1437 include/linux/of.h struct device_node *target; target 171 include/linux/perf_event.h struct task_struct *target; target 417 include/linux/ptrace.h extern int task_current_syscall(struct task_struct *target, struct syscall_info *info); target 36 include/linux/regset.h typedef int user_regset_active_fn(struct task_struct *target, target 55 include/linux/regset.h typedef int user_regset_get_fn(struct task_struct *target, target 76 include/linux/regset.h typedef int user_regset_set_fn(struct task_struct *target, target 102 include/linux/regset.h typedef int user_regset_writeback_fn(struct task_struct *target, target 125 include/linux/regset.h typedef unsigned int user_regset_get_size_fn(struct task_struct *target, target 365 include/linux/regset.h static inline int copy_regset_to_user(struct task_struct *target, target 379 include/linux/regset.h return regset->get(target, regset, offset, size, NULL, data); target 391 include/linux/regset.h static inline int copy_regset_from_user(struct task_struct *target, target 405 include/linux/regset.h return regset->set(target, regset, offset, size, NULL, data); target 416 include/linux/regset.h static inline unsigned int regset_size(struct task_struct *target, target 422 include/linux/regset.h return regset->get_size(target, regset); target 137 include/linux/security.h extern int cap_capget(struct task_struct *target, kernel_cap_t *effective, kernel_cap_t *inheritable, kernel_cap_t *permitted); target 261 include/linux/security.h int security_capget(struct task_struct *target, target 421 include/linux/security.h struct task_struct *target, long type, int mode); target 519 include/linux/security.h static inline int security_capget(struct task_struct *target, target 524 include/linux/security.h return cap_capget(target, effective, inheritable, permitted); target 1157 include/linux/security.h struct task_struct *target, target 1818 include/linux/security.h const char *target, target 1841 include/linux/security.h const char *target, target 65 include/linux/swiotlb.h enum dma_sync_target target); target 262 include/linux/sysfs.h int __must_check sysfs_create_link(struct kobject *kobj, struct kobject *target, target 265 include/linux/sysfs.h struct kobject *target, target 269 include/linux/sysfs.h int sysfs_rename_link_ns(struct kobject *kobj, struct kobject *target, target 297 include/linux/sysfs.h struct kobject *target, const char *link_name); target 407 include/linux/sysfs.h struct kobject *target, const char *name) target 413 include/linux/sysfs.h struct kobject *target, target 492 include/linux/sysfs.h const char *group_name, struct 
kobject *target, target 539 include/linux/sysfs.h static inline int sysfs_rename_link(struct kobject *kobj, struct kobject *target, target 542 include/linux/sysfs.h return sysfs_rename_link_ns(kobj, target, old_name, new_name, NULL); target 1796 include/linux/usb.h int recip, int type, int target, void *data); target 1799 include/linux/usb.h int recip, int target, void *data) target 1801 include/linux/usb.h return usb_get_status(dev, recip, USB_STATUS_TYPE_STANDARD, target, target 103 include/linux/virtio_vsock.h size_t target, target 107 include/linux/virtio_vsock.h size_t target, target 111 include/linux/virtio_vsock.h size_t target, struct vsock_transport_recv_notify_data *data); target 113 include/linux/virtio_vsock.h size_t target, struct vsock_transport_recv_notify_data *data); target 115 include/linux/virtio_vsock.h size_t target, struct vsock_transport_recv_notify_data *data); target 117 include/linux/virtio_vsock.h size_t target, ssize_t copied, bool data_read, target 245 include/net/9p/client.h int p9_client_readlink(struct p9_fid *fid, char **target); target 107 include/net/codel.h codel_time_t target; target 55 include/net/codel_impl.h params->target = MS2TIME(5); target 124 include/net/codel_impl.h if (codel_time_before(vars->ldelay, params->target) || target 31 include/net/fib_rules.h u32 target; target 1064 include/net/ipv6.h int ipv6_find_hdr(const struct sk_buff *skb, unsigned int *offset, int target, target 82 include/net/ndisc.h struct in6_addr target; target 99 include/net/ndisc.h struct in6_addr target; target 471 include/net/ndisc.h void ndisc_send_redirect(struct sk_buff *skb, const struct in6_addr *target); target 29 include/net/nfc/hci.h int (*dep_link_up)(struct nfc_hci_dev *hdev, struct nfc_target *target, target 33 include/net/nfc/hci.h struct nfc_target *target); target 35 include/net/nfc/hci.h struct nfc_target *target); target 37 include/net/nfc/hci.h struct nfc_target *target, struct sk_buff *skb, target 41 include/net/nfc/hci.h struct nfc_target *target); target 54 include/net/nfc/nfc.h int (*dep_link_up)(struct nfc_dev *dev, struct nfc_target *target, target 57 include/net/nfc/nfc.h int (*activate_target)(struct nfc_dev *dev, struct nfc_target *target, target 60 include/net/nfc/nfc.h struct nfc_target *target, u8 mode); target 61 include/net/nfc/nfc.h int (*im_transceive)(struct nfc_dev *dev, struct nfc_target *target, target 65 include/net/nfc/nfc.h int (*check_presence)(struct nfc_dev *dev, struct nfc_target *target); target 51 include/net/phonet/phonet.h const struct sockaddr_pn *target); target 332 include/scsi/scsi_device.h uint target, u64 lun); target 384 include/sound/hda_codec.h unsigned char target; /* target pin control value */ target 13 include/trace/events/cpuhp.h int target, target 17 include/trace/events/cpuhp.h TP_ARGS(cpu, target, idx, fun), target 21 include/trace/events/cpuhp.h __field( int, target ) target 28 include/trace/events/cpuhp.h __entry->target = target; target 34 include/trace/events/cpuhp.h __entry->cpu, __entry->target, __entry->idx, __entry->fun) target 40 include/trace/events/cpuhp.h int target, target 45 include/trace/events/cpuhp.h TP_ARGS(cpu, target, idx, fun, node), target 49 include/trace/events/cpuhp.h __field( int, target ) target 56 include/trace/events/cpuhp.h __entry->target = target; target 62 include/trace/events/cpuhp.h __entry->cpu, __entry->target, __entry->idx, __entry->fun) target 1070 include/trace/events/f2fs.h __field(dev_t, target) target 1080 include/trace/events/f2fs.h __entry->target = 
bio_dev(bio); target 1089 include/trace/events/f2fs.h show_dev(__entry->target), target 3 include/trace/events/target.h #define TRACE_SYSTEM target target 51 include/trace/events/thermal.h TP_PROTO(struct thermal_cooling_device *cdev, unsigned long target), target 53 include/trace/events/thermal.h TP_ARGS(cdev, target), target 57 include/trace/events/thermal.h __field(unsigned long, target) target 62 include/trace/events/thermal.h __entry->target = target; target 65 include/trace/events/thermal.h TP_printk("type=%s target=%lu", __get_str(type), __entry->target) target 356 include/uapi/drm/tegra_drm.h } target; target 83 include/uapi/drm/virtgpu_drm.h __u32 target; target 262 include/uapi/linux/android/binder.h } target; target 303 include/uapi/linux/btrfs.h __u64 target; target 697 include/uapi/linux/btrfs_tree.h __le64 target; target 144 include/uapi/linux/fsl_hypervisor.h __u32 target; target 47 include/uapi/linux/netfilter/x_tables.h struct xt_target *target; target 59 include/uapi/linux/netfilter/x_tables.h .target.u.user = { \ target 66 include/uapi/linux/netfilter/x_tables.h struct xt_entry_target target; target 71 include/uapi/linux/netfilter/x_tables.h struct xt_entry_target target; target 18 include/uapi/linux/netfilter_arp/arpt_mangle.h int target; target 9 include/uapi/linux/netfilter_bridge/ebt_arpreply.h int target; target 20 include/uapi/linux/netfilter_bridge/ebt_mark_t.h int target; target 11 include/uapi/linux/netfilter_bridge/ebt_nat.h int target; target 7 include/uapi/linux/netfilter_bridge/ebt_redirect.h int target; target 154 include/uapi/linux/netfilter_bridge/ebtables.h struct xt_target *target; target 163 include/uapi/linux/netfilter_bridge/ebtables.h struct ebt_entry_target target; target 134 include/uapi/linux/netfilter_ipv6/ip6_tables.h struct xt_standard_target target; target 139 include/uapi/linux/netfilter_ipv6/ip6_tables.h struct xt_error_target target; target 151 include/uapi/linux/netfilter_ipv6/ip6_tables.h .target = XT_TARGET_INIT(XT_STANDARD_TARGET, \ target 153 include/uapi/linux/netfilter_ipv6/ip6_tables.h .target.verdict = -(__verdict) - 1, \ target 159 include/uapi/linux/netfilter_ipv6/ip6_tables.h .target = XT_TARGET_INIT(XT_ERROR_TARGET, \ target 161 include/uapi/linux/netfilter_ipv6/ip6_tables.h .target.errorname = "ERROR", \ target 152 include/uapi/linux/v4l2-subdev.h __u32 target; target 1198 include/uapi/linux/videodev2.h __u32 target; target 233 include/uapi/linux/virtio_gpu.h __le32 target; target 25 include/xen/balloon.h void balloon_set_new_target(unsigned long target); target 351 kernel/bpf/inode.c const char *target) target 353 kernel/bpf/inode.c char *link = kstrdup(target, GFP_USER | __GFP_NOWARN); target 122 kernel/capability.c struct task_struct *target; target 126 kernel/capability.c target = find_task_by_vpid(pid); target 127 kernel/capability.c if (!target) target 130 kernel/capability.c ret = security_capget(target, pEp, pIp, pPp); target 57 kernel/cpu.c enum cpuhp_state target; target 171 kernel/cpu.c trace_cpuhp_enter(cpu, st->target, state, cb); target 183 kernel/cpu.c trace_cpuhp_multi_enter(cpu, st->target, state, cbm, node); target 195 kernel/cpu.c trace_cpuhp_multi_enter(cpu, st->target, state, cbm, node); target 220 kernel/cpu.c trace_cpuhp_multi_enter(cpu, st->target, state, cbm, node); target 453 kernel/cpu.c cpuhp_set_state(struct cpuhp_cpu_state *st, enum cpuhp_state target) target 460 kernel/cpu.c st->target = target; target 462 kernel/cpu.c st->bringup = st->state < target; target 483 kernel/cpu.c st->target = 
prev_state; target 490 kernel/cpu.c if (!st->single && st->state == st->target) target 504 kernel/cpu.c static int cpuhp_kick_ap(struct cpuhp_cpu_state *st, enum cpuhp_state target) target 509 kernel/cpu.c prev_state = cpuhp_set_state(st, target); target 541 kernel/cpu.c if (st->target <= CPUHP_AP_ONLINE_IDLE) target 544 kernel/cpu.c return cpuhp_kick_ap(st, st->target); target 573 kernel/cpu.c for (st->state--; st->state > st->target; st->state--) target 592 kernel/cpu.c enum cpuhp_state target) target 597 kernel/cpu.c while (st->state < target) { target 602 kernel/cpu.c st->target = prev_state; target 673 kernel/cpu.c st->should_run = (st->state < st->target); target 674 kernel/cpu.c WARN_ON_ONCE(st->state > st->target); target 678 kernel/cpu.c st->should_run = (st->state > st->target); target 679 kernel/cpu.c WARN_ON_ONCE(st->state < st->target); target 779 kernel/cpu.c trace_cpuhp_enter(cpu, st->target, prev_state, cpuhp_kick_ap_work); target 780 kernel/cpu.c ret = cpuhp_kick_ap(st, st->target); target 847 kernel/cpu.c enum cpuhp_state target = max((int)st->target, CPUHP_AP_OFFLINE); target 863 kernel/cpu.c for (; st->state > target; st->state--) { target 953 kernel/cpu.c for (st->state++; st->state < st->target; st->state++) target 958 kernel/cpu.c enum cpuhp_state target) target 963 kernel/cpu.c for (; st->state > target; st->state--) { target 966 kernel/cpu.c st->target = prev_state; target 977 kernel/cpu.c enum cpuhp_state target) target 992 kernel/cpu.c prev_state = cpuhp_set_state(st, target); target 998 kernel/cpu.c st->target = max((int)target, CPUHP_TEARDOWN_CPU); target 1014 kernel/cpu.c st->target = target; target 1020 kernel/cpu.c ret = cpuhp_down_callbacks(cpu, st, target); target 1037 kernel/cpu.c static int cpu_down_maps_locked(unsigned int cpu, enum cpuhp_state target) target 1041 kernel/cpu.c return _cpu_down(cpu, 0, target); target 1044 kernel/cpu.c static int do_cpu_down(unsigned int cpu, enum cpuhp_state target) target 1049 kernel/cpu.c err = cpu_down_maps_locked(cpu, target); target 1074 kernel/cpu.c enum cpuhp_state target = min((int)st->target, CPUHP_AP_ONLINE); target 1079 kernel/cpu.c while (st->state < target) { target 1113 kernel/cpu.c static int _cpu_up(unsigned int cpu, int tasks_frozen, enum cpuhp_state target) target 1130 kernel/cpu.c if (st->state >= target) target 1144 kernel/cpu.c cpuhp_set_state(st, target); target 1164 kernel/cpu.c target = min((int)target, CPUHP_BRINGUP_CPU); target 1165 kernel/cpu.c ret = cpuhp_up_callbacks(cpu, st, target); target 1172 kernel/cpu.c static int do_cpu_up(unsigned int cpu, enum cpuhp_state target) target 1200 kernel/cpu.c err = _cpu_up(cpu, 0, target); target 2005 kernel/cpu.c int target, ret; target 2007 kernel/cpu.c ret = kstrtoint(buf, 10, &target); target 2012 kernel/cpu.c if (target < CPUHP_OFFLINE || target > CPUHP_ONLINE) target 2015 kernel/cpu.c if (target != CPUHP_OFFLINE && target != CPUHP_ONLINE) target 2024 kernel/cpu.c sp = cpuhp_get_step(target); target 2030 kernel/cpu.c if (st->state < target) target 2031 kernel/cpu.c ret = do_cpu_up(dev->id, target); target 2033 kernel/cpu.c ret = do_cpu_down(dev->id, target); target 2044 kernel/cpu.c return sprintf(buf, "%d\n", st->target); target 2046 kernel/cpu.c static DEVICE_ATTR(target, 0644, show_cpuhp_target, write_cpuhp_target); target 2055 kernel/debug/kdb/kdb_main.c kdb_printf("%s ", use->target->name); target 629 kernel/dma/swiotlb.c enum dma_sync_target target) target 638 kernel/dma/swiotlb.c switch (target) { target 4048 kernel/events/core.c 
event->hw.target != current) { target 4578 kernel/events/core.c if (event->hw.target) target 4579 kernel/events/core.c put_task_struct(event->hw.target); target 10489 kernel/events/core.c event->hw.target = get_task_struct(task); target 10633 kernel/events/core.c if (event->hw.target) target 10634 kernel/events/core.c put_task_struct(event->hw.target); target 107 kernel/events/hw_breakpoint.c struct task_struct *tsk = bp->hw.target; target 112 kernel/events/hw_breakpoint.c if (iter->hw.target == tsk && target 144 kernel/events/hw_breakpoint.c if (!bp->hw.target) target 201 kernel/events/hw_breakpoint.c if (!bp->hw.target) { target 305 kernel/gcov/fs.c char *target; target 316 kernel/gcov/fs.c target = kasprintf(GFP_KERNEL, "%s/%s.%s", dir, copy, ext); target 318 kernel/gcov/fs.c target = kasprintf(GFP_KERNEL, "%s.%s", copy, ext); target 321 kernel/gcov/fs.c return target; target 369 kernel/gcov/fs.c char *target; target 379 kernel/gcov/fs.c target = get_link_target( target 382 kernel/gcov/fs.c if (!target) target 384 kernel/gcov/fs.c basename = kbasename(target); target 385 kernel/gcov/fs.c if (basename == target) target 388 kernel/gcov/fs.c parent, target); target 389 kernel/gcov/fs.c kfree(target); target 394 kernel/gcov/fs.c kfree(target); target 71 kernel/jump_label.c jea->target = jeb->target - delta; target 75 kernel/jump_label.c jeb->target = tmp.target + delta; target 1551 kernel/locking/lockdep.c print_circular_bug_entry(struct lock_list *target, int depth) target 1556 kernel/locking/lockdep.c print_lock_name(target->class); target 1558 kernel/locking/lockdep.c print_lock_trace(target->trace, 6); target 1567 kernel/locking/lockdep.c struct lock_class *target = hlock_class(tgt); target 1589 kernel/locking/lockdep.c __print_lock_name(target); target 1597 kernel/locking/lockdep.c __print_lock_name(target); target 1603 kernel/locking/lockdep.c __print_lock_name(target); target 1649 kernel/locking/lockdep.c struct lock_list *target, target 1665 kernel/locking/lockdep.c depth = get_lock_depth(target); target 1667 kernel/locking/lockdep.c print_circular_bug_header(target, depth, check_src, check_tgt); target 1669 kernel/locking/lockdep.c parent = get_lock_parent(target); target 1766 kernel/locking/lockdep.c check_path(struct lock_class *target, struct lock_list *src_entry, target 1771 kernel/locking/lockdep.c ret = __bfs_forwards(src_entry, (void *)target, class_equal, target 1788 kernel/locking/lockdep.c check_noncircular(struct held_lock *src, struct held_lock *target, target 1800 kernel/locking/lockdep.c ret = check_path(hlock_class(target), &src_entry, &target_entry); target 1812 kernel/locking/lockdep.c print_circular_bug(&src_entry, target_entry, src, target); target 1827 kernel/locking/lockdep.c check_redundant(struct held_lock *src, struct held_lock *target) target 1838 kernel/locking/lockdep.c ret = check_path(hlock_class(target), &src_entry, &target_entry); target 862 kernel/module.c use->target = b; target 897 kernel/module.c struct module *i = use->target; target 1729 kernel/module.c sysfs_remove_link(use->target->holders_dir, mod->name); target 1742 kernel/module.c ret = sysfs_create_link(use->target->holders_dir, target 1628 kernel/sched/deadline.c int target = find_later_rq(p); target 1630 kernel/sched/deadline.c if (target != -1 && target 1632 kernel/sched/deadline.c cpu_rq(target)->dl.earliest_dl.curr) || target 1633 kernel/sched/deadline.c (cpu_rq(target)->dl.dl_nr_running == 0))) target 1634 kernel/sched/deadline.c cpu = target; target 5549 kernel/sched/fair.c int 
target = nr_cpumask_bits; target 5552 kernel/sched/fair.c target = wake_affine_idle(this_cpu, prev_cpu, sync); target 5554 kernel/sched/fair.c if (sched_feat(WA_WEIGHT) && target == nr_cpumask_bits) target 5555 kernel/sched/fair.c target = wake_affine_weight(sd, p, this_cpu, prev_cpu, sync); target 5558 kernel/sched/fair.c if (target == nr_cpumask_bits) target 5563 kernel/sched/fair.c return target; target 5883 kernel/sched/fair.c static int select_idle_core(struct task_struct *p, struct sched_domain *sd, int target) target 5891 kernel/sched/fair.c if (!test_idle_cores(target, false)) target 5896 kernel/sched/fair.c for_each_cpu_wrap(core, cpus, target) { target 5912 kernel/sched/fair.c set_idle_cores(target, 0); target 5920 kernel/sched/fair.c static int select_idle_smt(struct task_struct *p, int target) target 5927 kernel/sched/fair.c for_each_cpu(cpu, cpu_smt_mask(target)) { target 5941 kernel/sched/fair.c static inline int select_idle_core(struct task_struct *p, struct sched_domain *sd, int target) target 5946 kernel/sched/fair.c static inline int select_idle_smt(struct task_struct *p, int target) target 5958 kernel/sched/fair.c static int select_idle_cpu(struct task_struct *p, struct sched_domain *sd, int target) target 5994 kernel/sched/fair.c for_each_cpu_wrap(cpu, cpus, target) { target 6014 kernel/sched/fair.c static int select_idle_sibling(struct task_struct *p, int prev, int target) target 6019 kernel/sched/fair.c if (available_idle_cpu(target) || sched_idle_cpu(target)) target 6020 kernel/sched/fair.c return target; target 6025 kernel/sched/fair.c if (prev != target && cpus_share_cache(prev, target) && target 6032 kernel/sched/fair.c recent_used_cpu != target && target 6033 kernel/sched/fair.c cpus_share_cache(recent_used_cpu, target) && target 6044 kernel/sched/fair.c sd = rcu_dereference(per_cpu(sd_llc, target)); target 6046 kernel/sched/fair.c return target; target 6048 kernel/sched/fair.c i = select_idle_core(p, sd, target); target 6052 kernel/sched/fair.c i = select_idle_cpu(p, sd, target); target 6056 kernel/sched/fair.c i = select_idle_smt(p, target); target 6060 kernel/sched/fair.c return target; target 1429 kernel/sched/rt.c int target = find_lowest_rq(p); target 1435 kernel/sched/rt.c if (target != -1 && target 1436 kernel/sched/rt.c p->prio < cpu_rq(target)->rt.highest_prio.curr) target 1437 kernel/sched/rt.c cpu = target; target 1470 kernel/signal.c struct task_struct *target) target 1472 kernel/signal.c const struct cred *pcred = __task_cred(target); target 18 kernel/test_kprobes.c static u32 (*target)(u32 value); target 66 kernel/test_kprobes.c ret = target(rand1); target 125 kernel/test_kprobes.c ret = target(rand1); target 206 kernel/test_kprobes.c ret = target(rand1); target 254 kernel/test_kprobes.c ret = target(rand1); target 275 kernel/test_kprobes.c target = kprobe_target; target 100 kernel/trace/trace_events_filter.c int target; target 121 kernel/trace/trace_events_filter.c t = prog[N].target; target 122 kernel/trace/trace_events_filter.c s = prog[t].target; target 124 kernel/trace/trace_events_filter.c prog[t].target = N; target 125 kernel/trace/trace_events_filter.c prog[N].target = s; target 474 kernel/trace/trace_events_filter.c prog[N].target = N-1; target 554 kernel/trace/trace_events_filter.c prog[N].target = 1; /* TRUE */ target 556 kernel/trace/trace_events_filter.c prog[N+1].target = 0; /* FALSE */ target 557 kernel/trace/trace_events_filter.c prog[N-1].target = N; target 562 kernel/trace/trace_events_filter.c int target = prog[i].target; target 
563 kernel/trace/trace_events_filter.c if (prog[i].when_to_branch == prog[target].when_to_branch) target 564 kernel/trace/trace_events_filter.c prog[i].target = prog[target].target; target 572 kernel/trace/trace_events_filter.c if (WARN_ON(prog[i].target <= i)) { target 914 kernel/trace/trace_events_filter.c i = prog[i].target; target 916 kernel/trace/trace_events_filter.c return prog[i].target; target 2009 kernel/trace/trace_events_filter.c int target; target 2016 kernel/trace/trace_events_filter.c target = prog[i].target + 1; target 2018 kernel/trace/trace_events_filter.c if (prog[target].pred) target 2022 kernel/trace/trace_events_filter.c return prog[i].when_to_branch == prog[target].target; target 1750 kernel/trace/trace_kprobe.c int (*target)(int, int, int, int, int, int); target 1762 kernel/trace/trace_kprobe.c target = kprobe_trace_selftest_target; target 1813 kernel/trace/trace_kprobe.c ret = target(1, 2, 3, 4, 5, 6); target 1194 kernel/trace/trace_uprobe.c if (event->hw.target->mm == mm) target 1205 kernel/trace/trace_uprobe.c return __uprobe_perf_filter(filter, event->hw.target->mm); target 1214 kernel/trace/trace_uprobe.c if (event->hw.target) { target 1217 kernel/trace/trace_uprobe.c (event->hw.target->flags & PF_EXITING) || target 1235 kernel/trace/trace_uprobe.c if (event->hw.target) { target 2655 kernel/workqueue.c struct work_struct *target, struct worker *worker) target 2669 kernel/workqueue.c init_completion_map(&barr->done, &target->lockdep_map); target 2680 kernel/workqueue.c unsigned long *bits = work_data_bits(target); target 2682 kernel/workqueue.c head = target->entry.next; target 639 lib/btree.c int btree_merge(struct btree_head *target, struct btree_head *victim, target 647 lib/btree.c BUG_ON(target == victim); target 649 lib/btree.c if (!(target->node)) { target 651 lib/btree.c target->node = victim->node; target 652 lib/btree.c target->height = victim->height; target 664 lib/btree.c err = btree_insert(target, geo, key, val, gfp); target 80 lib/lz4/lz4hc_compress.c U32 const target = (U32)(ip - base); target 83 lib/lz4/lz4hc_compress.c while (idx < target) { target 96 lib/lz4/lz4hc_compress.c hc4->nextToUpdate = target; target 8 lib/syscall.c static int collect_syscall(struct task_struct *target, struct syscall_info *info) target 12 lib/syscall.c if (!try_get_task_stack(target)) { target 19 lib/syscall.c regs = task_pt_regs(target); target 21 lib/syscall.c put_task_stack(target); target 28 lib/syscall.c info->data.nr = syscall_get_nr(target, regs); target 30 lib/syscall.c syscall_get_arguments(target, regs, target 33 lib/syscall.c put_task_stack(target); target 62 lib/syscall.c int task_current_syscall(struct task_struct *target, struct syscall_info *info) target 67 lib/syscall.c if (target == current) target 68 lib/syscall.c return collect_syscall(target, info); target 70 lib/syscall.c state = target->state; target 74 lib/syscall.c ncsw = wait_task_inactive(target, state); target 76 lib/syscall.c unlikely(collect_syscall(target, info)) || target 77 lib/syscall.c unlikely(wait_task_inactive(target, state) != ncsw)) target 1727 lib/zstd/compress.c const U32 target = (U32)(ip - base); target 1730 lib/zstd/compress.c while (idx < target) target 1758 lib/zstd/compress.c const U32 target = (U32)(ip - base); target 1761 lib/zstd/compress.c while (idx < target) target 1803 lib/zstd/compress.c const U32 target = (U32)(ip - base); target 1806 lib/zstd/compress.c while (idx < target) { /* catch up */ target 1813 lib/zstd/compress.c zc->nextToUpdate = target; target 221 
lib/zstd/zstd_opt.h const U32 target = zc->nextToUpdate3 = (U32)(ip - base); target 224 lib/zstd/zstd_opt.h while (idx < target) { target 878 mm/memcontrol.c enum mem_cgroup_events_target target) target 883 mm/memcontrol.c next = __this_cpu_read(memcg->vmstats_percpu->targets[target]); target 886 mm/memcontrol.c switch (target) { target 899 mm/memcontrol.c __this_cpu_write(memcg->vmstats_percpu->targets[target], next); target 5595 mm/memcontrol.c unsigned long addr, pte_t ptent, union mc_target *target) target 5620 mm/memcontrol.c if (target) target 5621 mm/memcontrol.c target->page = page; target 5623 mm/memcontrol.c if (!ret || !target) target 5633 mm/memcontrol.c if (target) target 5634 mm/memcontrol.c target->ent = ent; target 5646 mm/memcontrol.c unsigned long addr, pmd_t pmd, union mc_target *target) target 5662 mm/memcontrol.c if (target) { target 5664 mm/memcontrol.c target->page = page; target 5671 mm/memcontrol.c unsigned long addr, pmd_t pmd, union mc_target *target) target 5885 mm/memcontrol.c union mc_target target; target 5894 mm/memcontrol.c target_type = get_mctgt_type_thp(vma, addr, *pmd, &target); target 5896 mm/memcontrol.c page = target.page; target 5907 mm/memcontrol.c page = target.page; target 5931 mm/memcontrol.c switch (get_mctgt_type(vma, addr, ptent, &target)) { target 5936 mm/memcontrol.c page = target.page; target 5959 mm/memcontrol.c ent = target.ent; target 1891 mm/mempolicy.c unsigned target; target 1897 mm/mempolicy.c target = (unsigned int)n % nnodes; target 1899 mm/mempolicy.c for (i = 0; i < target; i++) target 2263 net/9p/client.c int p9_client_readlink(struct p9_fid *fid, char **target) target 2277 net/9p/client.c err = p9pdu_readf(&req->rc, clnt->proto_version, "s", target); target 2282 net/9p/client.c p9_debug(P9_DEBUG_9P, "<<< RREADLINK target %s\n", *target); target 138 net/appletalk/atalk_proc.c ntohs(rt->target.s_net), rt->target.s_node, target 423 net/appletalk/ddp.c static struct atalk_route *atrtr_find(struct atalk_addr *target) target 438 net/appletalk/ddp.c if (r->target.s_net == target->s_net) { target 444 net/appletalk/ddp.c if (r->target.s_node == target->s_node) target 519 net/appletalk/ddp.c if (ta->sat_addr.s_net == rt->target.s_net) { target 522 net/appletalk/ddp.c if (ta->sat_addr.s_node == rt->target.s_node) target 564 net/appletalk/ddp.c rt->target = ta->sat_addr; target 586 net/appletalk/ddp.c if (tmp->target.s_net == addr->s_net && target 588 net/appletalk/ddp.c tmp->target.s_node == addr->s_node)) { target 334 net/bluetooth/af_bluetooth.c size_t target, copied = 0; target 344 net/bluetooth/af_bluetooth.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, size); target 353 net/bluetooth/af_bluetooth.c if (copied >= target) target 320 net/bridge/br_arp_nd_proxy.c na->target = ns->target; target 431 net/bridge/br_arp_nd_proxy.c if (br_is_local_ip6(vlandev, &msg->target)) { target 439 net/bridge/br_arp_nd_proxy.c n = neigh_lookup(ipv6_stub->nd_tbl, &msg->target, vlandev); target 59 net/bridge/netfilter/ebt_arpreply.c return info->target; target 67 net/bridge/netfilter/ebt_arpreply.c if (BASE_CHAIN && info->target == EBT_RETURN) target 72 net/bridge/netfilter/ebt_arpreply.c if (ebt_invalid_target(info->target)) target 84 net/bridge/netfilter/ebt_arpreply.c .target = ebt_arpreply_tg, target 50 net/bridge/netfilter/ebt_dnat.c return info->target; target 58 net/bridge/netfilter/ebt_dnat.c return info->target; target 66 net/bridge/netfilter/ebt_dnat.c if (BASE_CHAIN && info->target == EBT_RETURN) target 76 net/bridge/netfilter/ebt_dnat.c if 
(ebt_invalid_target(info->target)) target 87 net/bridge/netfilter/ebt_dnat.c .target = ebt_dnat_tg, target 207 net/bridge/netfilter/ebt_log.c .target = ebt_log_tg, target 26 net/bridge/netfilter/ebt_mark.c int action = info->target & -16; target 37 net/bridge/netfilter/ebt_mark.c return info->target | ~EBT_VERDICT_BITS; target 45 net/bridge/netfilter/ebt_mark.c tmp = info->target | ~EBT_VERDICT_BITS; target 50 net/bridge/netfilter/ebt_mark.c tmp = info->target & ~EBT_VERDICT_BITS; target 59 net/bridge/netfilter/ebt_mark.c compat_uint_t target; target 68 net/bridge/netfilter/ebt_mark.c kern->target = user->target; target 77 net/bridge/netfilter/ebt_mark.c put_user(kern->target, &user->target)) target 87 net/bridge/netfilter/ebt_mark.c .target = ebt_mark_tg, target 55 net/bridge/netfilter/ebt_nflog.c .target = ebt_nflog_tg, target 34 net/bridge/netfilter/ebt_redirect.c return info->target; target 42 net/bridge/netfilter/ebt_redirect.c if (BASE_CHAIN && info->target == EBT_RETURN) target 51 net/bridge/netfilter/ebt_redirect.c if (ebt_invalid_target(info->target)) target 62 net/bridge/netfilter/ebt_redirect.c .target = ebt_redirect_tg, target 29 net/bridge/netfilter/ebt_snat.c if (!(info->target & NAT_ARP_BIT) && target 43 net/bridge/netfilter/ebt_snat.c return info->target | ~EBT_VERDICT_BITS; target 51 net/bridge/netfilter/ebt_snat.c tmp = info->target | ~EBT_VERDICT_BITS; target 57 net/bridge/netfilter/ebt_snat.c tmp = info->target | EBT_VERDICT_BITS; target 69 net/bridge/netfilter/ebt_snat.c .target = ebt_snat_tg, target 83 net/bridge/netfilter/ebtables.c par->target = w->u.watcher; target 85 net/bridge/netfilter/ebtables.c w->u.watcher->target(skb, par); target 233 net/bridge/netfilter/ebtables.c if (!t->u.target->target) target 236 net/bridge/netfilter/ebtables.c acpar.target = t->u.target; target 238 net/bridge/netfilter/ebtables.c verdict = t->u.target->target(skb, &acpar); target 414 net/bridge/netfilter/ebtables.c par->target = watcher; target 607 net/bridge/netfilter/ebtables.c par.target = w->u.watcher; target 610 net/bridge/netfilter/ebtables.c if (par.target->destroy != NULL) target 611 net/bridge/netfilter/ebtables.c par.target->destroy(&par); target 612 net/bridge/netfilter/ebtables.c module_put(par.target->me); target 632 net/bridge/netfilter/ebtables.c par.target = t->u.target; target 635 net/bridge/netfilter/ebtables.c if (par.target->destroy != NULL) target 636 net/bridge/netfilter/ebtables.c par.target->destroy(&par); target 637 net/bridge/netfilter/ebtables.c module_put(par.target->me); target 648 net/bridge/netfilter/ebtables.c struct xt_target *target; target 710 net/bridge/netfilter/ebtables.c target = xt_request_find_target(NFPROTO_BRIDGE, t->u.name, 0); target 711 net/bridge/netfilter/ebtables.c if (IS_ERR(target)) { target 712 net/bridge/netfilter/ebtables.c ret = PTR_ERR(target); target 717 net/bridge/netfilter/ebtables.c if (target->family != NFPROTO_BRIDGE) { target 718 net/bridge/netfilter/ebtables.c module_put(target->me); target 723 net/bridge/netfilter/ebtables.c t->u.target = target; target 724 net/bridge/netfilter/ebtables.c if (t->u.target == &ebt_standard_target) { target 735 net/bridge/netfilter/ebtables.c module_put(t->u.target->me); target 740 net/bridge/netfilter/ebtables.c tgpar.target = target; target 745 net/bridge/netfilter/ebtables.c module_put(target->me); target 1374 net/bridge/netfilter/ebtables.c ret = ebt_obj_to_user(hlp, t->u.target->name, t->data, sizeof(*t), target 1375 net/bridge/netfilter/ebtables.c t->u.target->usersize, 
t->target_size, target 1376 net/bridge/netfilter/ebtables.c t->u.target->revision); target 1624 net/bridge/netfilter/ebtables.c const struct xt_target *target = t->u.target; target 1626 net/bridge/netfilter/ebtables.c int off = xt_compat_target_offset(target); target 1632 net/bridge/netfilter/ebtables.c if (copy_to_user(cm->u.name, target->name, strlen(target->name) + 1) || target 1633 net/bridge/netfilter/ebtables.c put_user(target->revision, &cm->u.revision) || target 1637 net/bridge/netfilter/ebtables.c if (target->compat_to_user) { target 1638 net/bridge/netfilter/ebtables.c if (target->compat_to_user(cm->data, t->data)) target 1641 net/bridge/netfilter/ebtables.c if (xt_data_to_user(cm->data, t->data, target->usersize, tsize, target 1750 net/bridge/netfilter/ebtables.c off += xt_compat_target_offset(t->u.target); target 352 net/caif/caif_socket.c int target; target 369 net/caif/caif_socket.c target = sock_rcvlowat(sk, flags&MSG_WAITALL, size); target 385 net/caif/caif_socket.c if (copied >= target) target 289 net/core/fib_rules.c struct fib_rule *target; target 291 net/core/fib_rules.c target = rcu_dereference(rule->ctarget); target 292 net/core/fib_rules.c if (target == NULL) { target 295 net/core/fib_rules.c rule = target; target 591 net/core/fib_rules.c nlrule->target = nla_get_u32(tb[FRA_GOTO]); target 593 net/core/fib_rules.c if (nlrule->target <= nlrule->pref) { target 773 net/core/fib_rules.c if (r->pref == rule->target) { target 800 net/core/fib_rules.c r->target == rule->pref && target 1007 net/core/fib_rules.c (rule->target && target 1008 net/core/fib_rules.c nla_put_u32(skb, FRA_GOTO, rule->target)) || target 540 net/core/filter.c int new_flen = 0, pass = 0, target, i, stack_off; target 671 net/core/filter.c if (target >= len || target < 0) \ target 673 net/core/filter.c off = addrs ? addrs[target] - addrs[i] - 1 : 0; \ target 683 net/core/filter.c target = i + fp->k + 1; target 716 net/core/filter.c target = i + fp->jt + 1; target 737 net/core/filter.c target = i + fp->jf + 1; target 743 net/core/filter.c target = i + fp->jt + 1; target 749 net/core/filter.c target = i + fp->jf + 1; target 876 net/core/net_namespace.c struct net *peer, *target = net; target 909 net/core/net_namespace.c target = rtnl_get_net_ns_capable(NETLINK_CB(skb).sk, id); target 910 net/core/net_namespace.c if (IS_ERR(target)) { target 914 net/core/net_namespace.c err = PTR_ERR(target); target 927 net/core/net_namespace.c fillargs.nsid = peernet2id(target, peer); target 939 net/core/net_namespace.c put_net(target); target 1640 net/decnet/af_decnet.c static int dn_data_ready(struct sock *sk, struct sk_buff_head *q, int flags, int target) target 1662 net/decnet/af_decnet.c if (len >= target) target 1676 net/decnet/af_decnet.c size_t target = size > 1 ? 
1 : 0; target 1709 net/decnet/af_decnet.c target = size; target 1739 net/decnet/af_decnet.c if (dn_data_ready(sk, queue, flags, target)) target 1749 net/decnet/af_decnet.c sk_wait_event(sk, &timeo, dn_data_ready(sk, queue, flags, target), &wait); target 1795 net/decnet/af_decnet.c if (copied >= target) target 335 net/ipv4/arp.c __be32 target = *(__be32 *)neigh->primary_key; target 360 net/ipv4/arp.c if (inet_addr_onlink(in_dev, target, saddr)) target 371 net/ipv4/arp.c saddr = inet_select_addr(dev, target, RT_SCOPE_LINK); target 389 net/ipv4/arp.c arp_send_dst(ARPOP_REQUEST, ETH_P_ARP, target, dev, saddr, target 235 net/ipv4/netfilter/arp_tables.c if (!t->u.kernel.target->target) { target 267 net/ipv4/netfilter/arp_tables.c acpar.target = t->u.kernel.target; target 269 net/ipv4/netfilter/arp_tables.c verdict = t->u.kernel.target->target(skb, &acpar); target 333 net/ipv4/netfilter/arp_tables.c (strcmp(t->target.u.user.name, target 364 net/ipv4/netfilter/arp_tables.c if (strcmp(t->target.u.user.name, target 394 net/ipv4/netfilter/arp_tables.c .target = t->u.kernel.target, target 409 net/ipv4/netfilter/arp_tables.c struct xt_target *target; target 416 net/ipv4/netfilter/arp_tables.c target = xt_request_find_target(NFPROTO_ARP, t->u.user.name, target 418 net/ipv4/netfilter/arp_tables.c if (IS_ERR(target)) { target 419 net/ipv4/netfilter/arp_tables.c ret = PTR_ERR(target); target 422 net/ipv4/netfilter/arp_tables.c t->u.kernel.target = target; target 429 net/ipv4/netfilter/arp_tables.c module_put(t->u.kernel.target->me); target 506 net/ipv4/netfilter/arp_tables.c par.target = t->u.kernel.target; target 509 net/ipv4/netfilter/arp_tables.c if (par.target->destroy != NULL) target 510 net/ipv4/netfilter/arp_tables.c par.target->destroy(&par); target 511 net/ipv4/netfilter/arp_tables.c module_put(par.target->me); target 747 net/ipv4/netfilter/arp_tables.c off += xt_compat_target_offset(t->u.kernel.target); target 1068 net/ipv4/netfilter/arp_tables.c module_put(t->u.kernel.target->me); target 1079 net/ipv4/netfilter/arp_tables.c struct xt_target *target; target 1104 net/ipv4/netfilter/arp_tables.c target = xt_request_find_target(NFPROTO_ARP, t->u.user.name, target 1106 net/ipv4/netfilter/arp_tables.c if (IS_ERR(target)) { target 1107 net/ipv4/netfilter/arp_tables.c ret = PTR_ERR(target); target 1110 net/ipv4/netfilter/arp_tables.c t->u.kernel.target = target; target 1112 net/ipv4/netfilter/arp_tables.c off += xt_compat_target_offset(target); target 1121 net/ipv4/netfilter/arp_tables.c module_put(t->u.kernel.target->me); target 1602 net/ipv4/netfilter/arp_tables.c .target = arpt_error, target 55 net/ipv4/netfilter/arpt_mangle.c return mangle->target; target 66 net/ipv4/netfilter/arpt_mangle.c if (mangle->target != NF_DROP && mangle->target != NF_ACCEPT && target 67 net/ipv4/netfilter/arpt_mangle.c mangle->target != XT_CONTINUE) target 75 net/ipv4/netfilter/arpt_mangle.c .target = target, target 164 net/ipv4/netfilter/ip_tables.c if (strcmp(t->target.u.kernel.target->name, XT_ERROR_TARGET) == 0) { target 166 net/ipv4/netfilter/ip_tables.c *chainname = t->target.data; target 172 net/ipv4/netfilter/ip_tables.c strcmp(t->target.u.kernel.target->name, target 302 net/ipv4/netfilter/ip_tables.c WARN_ON(!t->u.kernel.target); target 311 net/ipv4/netfilter/ip_tables.c if (!t->u.kernel.target->target) { target 343 net/ipv4/netfilter/ip_tables.c acpar.target = t->u.kernel.target; target 346 net/ipv4/netfilter/ip_tables.c verdict = t->u.kernel.target->target(skb, &acpar); target 398 net/ipv4/netfilter/ip_tables.c 
(strcmp(t->target.u.user.name, target 428 net/ipv4/netfilter/ip_tables.c if (strcmp(t->target.u.user.name, target 505 net/ipv4/netfilter/ip_tables.c .target = t->u.kernel.target, target 521 net/ipv4/netfilter/ip_tables.c struct xt_target *target; target 545 net/ipv4/netfilter/ip_tables.c target = xt_request_find_target(NFPROTO_IPV4, t->u.user.name, target 547 net/ipv4/netfilter/ip_tables.c if (IS_ERR(target)) { target 548 net/ipv4/netfilter/ip_tables.c ret = PTR_ERR(target); target 551 net/ipv4/netfilter/ip_tables.c t->u.kernel.target = target; target 559 net/ipv4/netfilter/ip_tables.c module_put(t->u.kernel.target->me); target 649 net/ipv4/netfilter/ip_tables.c par.target = t->u.kernel.target; target 652 net/ipv4/netfilter/ip_tables.c if (par.target->destroy != NULL) target 653 net/ipv4/netfilter/ip_tables.c par.target->destroy(&par); target 654 net/ipv4/netfilter/ip_tables.c module_put(par.target->me); target 904 net/ipv4/netfilter/ip_tables.c off += xt_compat_target_offset(t->u.kernel.target); target 1282 net/ipv4/netfilter/ip_tables.c module_put(t->u.kernel.target->me); target 1294 net/ipv4/netfilter/ip_tables.c struct xt_target *target; target 1327 net/ipv4/netfilter/ip_tables.c target = xt_request_find_target(NFPROTO_IPV4, t->u.user.name, target 1329 net/ipv4/netfilter/ip_tables.c if (IS_ERR(target)) { target 1330 net/ipv4/netfilter/ip_tables.c ret = PTR_ERR(target); target 1333 net/ipv4/netfilter/ip_tables.c t->u.kernel.target = target; target 1335 net/ipv4/netfilter/ip_tables.c off += xt_compat_target_offset(target); target 1344 net/ipv4/netfilter/ip_tables.c module_put(t->u.kernel.target->me); target 1706 net/ipv4/netfilter/ip_tables.c int target; target 1719 net/ipv4/netfilter/ip_tables.c target = 1; target 1721 net/ipv4/netfilter/ip_tables.c target = 0; target 1725 net/ipv4/netfilter/ip_tables.c target, &ret), target 1867 net/ipv4/netfilter/ip_tables.c .target = ipt_error, target 561 net/ipv4/netfilter/ipt_CLUSTERIP.c .target = clusterip_tg, target 115 net/ipv4/netfilter/ipt_ECN.c .target = ecn_tg, target 90 net/ipv4/netfilter/ipt_REJECT.c .target = reject_tg, target 99 net/ipv4/netfilter/ipt_SYNPROXY.c .target = synproxy_tg4, target 58 net/ipv4/netfilter/iptable_filter.c ((struct ipt_standard *)repl->entries)[1].target.verdict = target 478 net/ipv4/tcp.c int target, struct sock *sk) target 483 net/ipv4/tcp.c if (avail >= target) target 555 net/ipv4/tcp.c int target = sock_rcvlowat(sk, 0, INT_MAX); target 560 net/ipv4/tcp.c target++; target 562 net/ipv4/tcp.c if (tcp_stream_is_readable(tp, target, sk)) target 1963 net/ipv4/tcp.c int target; /* Read at least this many bytes */ target 2011 net/ipv4/tcp.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 2056 net/ipv4/tcp.c if (copied >= target && !sk->sk_backlog.tail) target 2099 net/ipv4/tcp.c if (copied >= target) { target 187 net/ipv6/exthdrs_core.c int target, unsigned short *fragoff, int *flags) target 209 net/ipv6/exthdrs_core.c found = (nexthdr == target); target 212 net/ipv6/exthdrs_core.c if (target < 0 || found) target 250 net/ipv6/exthdrs_core.c if (target < 0 && target 265 net/ipv6/exthdrs_core.c if (flags && (*flags & IP6_FH_F_AUTH) && (target < 0)) target 503 net/ipv6/ip6_output.c struct in6_addr *target = NULL; target 514 net/ipv6/ip6_output.c target = &rt->rt6i_gateway; target 516 net/ipv6/ip6_output.c target = &hdr->daddr; target 524 net/ipv6/ip6_output.c ndisc_send_redirect(skb, target); target 561 net/ipv6/ndisc.c .target = *solicited_addr, target 632 net/ipv6/ndisc.c .target = *solicit, target 720 
net/ipv6/ndisc.c struct in6_addr *target = (struct in6_addr *)&neigh->primary_key; target 732 net/ipv6/ndisc.c __func__, target); target 734 net/ipv6/ndisc.c ndisc_send_ns(dev, target, target, saddr, 0); target 738 net/ipv6/ndisc.c addrconf_addr_solict_mult(target, &mcaddr); target 739 net/ipv6/ndisc.c ndisc_send_ns(dev, target, &mcaddr, saddr, 0); target 790 net/ipv6/ndisc.c if (ipv6_addr_is_multicast(&msg->target)) { target 833 net/ipv6/ndisc.c ifp = ipv6_get_ifaddr(dev_net(dev), &msg->target, dev, 1); target 876 net/ipv6/ndisc.c ifp = ipv6_get_ifaddr(net, &msg->target, mdev, 1); target 888 net/ipv6/ndisc.c if (ipv6_chk_acast_addr(net, dev, &msg->target) || target 891 net/ipv6/ndisc.c (is_router = pndisc_is_router(&msg->target, dev)) >= 0)) { target 916 net/ipv6/ndisc.c ndisc_send_na(dev, &in6addr_linklocal_allnodes, &msg->target, target 938 net/ipv6/ndisc.c ndisc_send_na(dev, saddr, &msg->target, !!is_router, target 970 net/ipv6/ndisc.c if (ipv6_addr_is_multicast(&msg->target)) { target 1001 net/ipv6/ndisc.c ifp = ipv6_get_ifaddr(dev_net(dev), &msg->target, dev, 1); target 1024 net/ipv6/ndisc.c neigh = neigh_lookup(&nd_tbl, &msg->target, dev); target 1040 net/ipv6/ndisc.c pneigh_lookup(&nd_tbl, net, &msg->target, dev, 0)) { target 1576 net/ipv6/ndisc.c void ndisc_send_redirect(struct sk_buff *skb, const struct in6_addr *target) target 1606 net/ipv6/ndisc.c if (!ipv6_addr_equal(&ipv6_hdr(skb)->daddr, target) && target 1607 net/ipv6/ndisc.c ipv6_addr_type(target) != (IPV6_ADDR_UNICAST|IPV6_ADDR_LINKLOCAL)) { target 1640 net/ipv6/ndisc.c struct neighbour *neigh = dst_neigh_lookup(skb_dst(skb), target); target 1676 net/ipv6/ndisc.c .target = *target, target 189 net/ipv6/netfilter/ip6_tables.c if (strcmp(t->target.u.kernel.target->name, XT_ERROR_TARGET) == 0) { target 191 net/ipv6/netfilter/ip6_tables.c *chainname = t->target.data; target 197 net/ipv6/netfilter/ip6_tables.c strcmp(t->target.u.kernel.target->name, target 325 net/ipv6/netfilter/ip6_tables.c WARN_ON(!t->u.kernel.target); target 334 net/ipv6/netfilter/ip6_tables.c if (!t->u.kernel.target->target) { target 364 net/ipv6/netfilter/ip6_tables.c acpar.target = t->u.kernel.target; target 367 net/ipv6/netfilter/ip6_tables.c verdict = t->u.kernel.target->target(skb, &acpar); target 416 net/ipv6/netfilter/ip6_tables.c (strcmp(t->target.u.user.name, target 446 net/ipv6/netfilter/ip6_tables.c if (strcmp(t->target.u.user.name, target 523 net/ipv6/netfilter/ip6_tables.c .target = t->u.kernel.target, target 540 net/ipv6/netfilter/ip6_tables.c struct xt_target *target; target 564 net/ipv6/netfilter/ip6_tables.c target = xt_request_find_target(NFPROTO_IPV6, t->u.user.name, target 566 net/ipv6/netfilter/ip6_tables.c if (IS_ERR(target)) { target 567 net/ipv6/netfilter/ip6_tables.c ret = PTR_ERR(target); target 570 net/ipv6/netfilter/ip6_tables.c t->u.kernel.target = target; target 577 net/ipv6/netfilter/ip6_tables.c module_put(t->u.kernel.target->me); target 666 net/ipv6/netfilter/ip6_tables.c par.target = t->u.kernel.target; target 669 net/ipv6/netfilter/ip6_tables.c if (par.target->destroy != NULL) target 670 net/ipv6/netfilter/ip6_tables.c par.target->destroy(&par); target 671 net/ipv6/netfilter/ip6_tables.c module_put(par.target->me); target 920 net/ipv6/netfilter/ip6_tables.c off += xt_compat_target_offset(t->u.kernel.target); target 1298 net/ipv6/netfilter/ip6_tables.c module_put(t->u.kernel.target->me); target 1310 net/ipv6/netfilter/ip6_tables.c struct xt_target *target; target 1343 net/ipv6/netfilter/ip6_tables.c target = 
xt_request_find_target(NFPROTO_IPV6, t->u.user.name, target 1345 net/ipv6/netfilter/ip6_tables.c if (IS_ERR(target)) { target 1346 net/ipv6/netfilter/ip6_tables.c ret = PTR_ERR(target); target 1349 net/ipv6/netfilter/ip6_tables.c t->u.kernel.target = target; target 1351 net/ipv6/netfilter/ip6_tables.c off += xt_compat_target_offset(target); target 1360 net/ipv6/netfilter/ip6_tables.c module_put(t->u.kernel.target->me); target 1715 net/ipv6/netfilter/ip6_tables.c int target; target 1728 net/ipv6/netfilter/ip6_tables.c target = 1; target 1730 net/ipv6/netfilter/ip6_tables.c target = 0; target 1734 net/ipv6/netfilter/ip6_tables.c target, &ret), target 1878 net/ipv6/netfilter/ip6_tables.c .target = ip6t_error, target 110 net/ipv6/netfilter/ip6t_NPT.c .target = ip6t_snpt_tg, target 122 net/ipv6/netfilter/ip6t_NPT.c .target = ip6t_dnpt_tg, target 101 net/ipv6/netfilter/ip6t_REJECT.c .target = reject_tg6, target 102 net/ipv6/netfilter/ip6t_SYNPROXY.c .target = synproxy_tg6, target 59 net/ipv6/netfilter/ip6table_filter.c ((struct ip6t_standard *)repl->entries)[1].target.verdict = target 606 net/ipv6/route.c struct in6_addr target; target 616 net/ipv6/route.c addrconf_addr_solict_mult(&work->target, &mcaddr); target 617 net/ipv6/route.c ndisc_send_ns(work->dev, &work->target, &mcaddr, NULL, 0); target 671 net/ipv6/route.c work->target = *nh_gw; target 4004 net/ipv6/route.c if (ipv6_addr_equal(&msg->dest, &msg->target)) { target 4006 net/ipv6/route.c } else if (ipv6_addr_type(&msg->target) != target 4050 net/ipv6/route.c neigh = __neigh_lookup(&nd_tbl, &msg->target, skb->dev, 1); target 740 net/llc/af_llc.c int target; /* Read at least this many bytes */ target 756 net/llc/af_llc.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 783 net/llc/af_llc.c if (copied >= target && !sk->sk_backlog.tail) target 821 net/llc/af_llc.c if (copied >= target) { /* Do not sleep, just process backlog. 
*/ target 157 net/mac80211/debugfs_sta.c codel_time_to_us(sta->cparams.target), target 304 net/mac80211/mesh.h u8 ttl, const u8 *target, u32 target_sn, target 102 net/mac80211/mesh_hwmp.c u8 target_flags, const u8 *target, target 135 net/mac80211/mesh_hwmp.c mhwmp_dbg(sdata, "sending PREQ to %pM\n", target); target 161 net/mac80211/mesh_hwmp.c memcpy(pos, target, ETH_ALEN); target 182 net/mac80211/mesh_hwmp.c memcpy(pos, target, ETH_ALEN); target 235 net/mac80211/mesh_hwmp.c u8 ttl, const u8 *target, u32 target_sn, target 281 net/mac80211/mesh_hwmp.c memcpy(pos, target, ETH_ALEN); target 480 net/mac80211/sta_info.c sta->cparams.target = MS2TIME(20); target 2478 net/mac80211/sta_info.c sta->cparams.target = MS2TIME(50); target 2482 net/mac80211/sta_info.c sta->cparams.target = MS2TIME(20); target 1560 net/mac80211/tx.c local->cparams.target = MS2TIME(20); target 1008 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_read(&t->ref) && ext->target) { target 1096 net/netfilter/ipset/ip_set_hash_gen.h if (atomic_read(&t->ref) && ext->target) { target 355 net/netfilter/nf_nat_proto.c const struct nf_conntrack_tuple *target, target 367 net/netfilter/nf_nat_proto.c if (!l4proto_manip_pkt(skb, iphdroff, hdroff, target, maniptype)) target 372 net/netfilter/nf_nat_proto.c csum_replace4(&iph->check, iph->saddr, target->src.u3.ip); target 373 net/netfilter/nf_nat_proto.c iph->saddr = target->src.u3.ip; target 375 net/netfilter/nf_nat_proto.c csum_replace4(&iph->check, iph->daddr, target->dst.u3.ip); target 376 net/netfilter/nf_nat_proto.c iph->daddr = target->dst.u3.ip; target 383 net/netfilter/nf_nat_proto.c const struct nf_conntrack_tuple *target, target 403 net/netfilter/nf_nat_proto.c !l4proto_manip_pkt(skb, iphdroff, hdroff, target, maniptype)) target 411 net/netfilter/nf_nat_proto.c ipv6h->saddr = target->src.u3.in6; target 413 net/netfilter/nf_nat_proto.c ipv6h->daddr = target->dst.u3.in6; target 423 net/netfilter/nf_nat_proto.c struct nf_conntrack_tuple target; target 426 net/netfilter/nf_nat_proto.c nf_ct_invert_tuple(&target, &ct->tuplehash[!dir].tuple); target 428 net/netfilter/nf_nat_proto.c switch (target.src.l3num) { target 430 net/netfilter/nf_nat_proto.c if (nf_nat_ipv6_manip_pkt(skb, 0, &target, mtype)) target 434 net/netfilter/nf_nat_proto.c if (nf_nat_ipv4_manip_pkt(skb, 0, &target, mtype)) target 571 net/netfilter/nf_nat_proto.c struct nf_conntrack_tuple target; target 615 net/netfilter/nf_nat_proto.c nf_ct_invert_tuple(&target, &ct->tuplehash[!dir].tuple); target 616 net/netfilter/nf_nat_proto.c target.dst.protonum = IPPROTO_ICMP; target 617 net/netfilter/nf_nat_proto.c if (!nf_nat_ipv4_manip_pkt(skb, 0, &target, manip)) target 793 net/netfilter/nf_nat_proto.c struct nf_conntrack_tuple target; target 839 net/netfilter/nf_nat_proto.c nf_ct_invert_tuple(&target, &ct->tuplehash[!dir].tuple); target 840 net/netfilter/nf_nat_proto.c target.dst.protonum = IPPROTO_ICMPV6; target 841 net/netfilter/nf_nat_proto.c if (!nf_nat_ipv6_manip_pkt(skb, 0, &target, manip)) target 62 net/netfilter/nft_compat.c par->target = xt; target 72 net/netfilter/nft_compat.c struct xt_target *target = expr->ops->data; target 76 net/netfilter/nft_compat.c nft_compat_set_par((struct xt_action_param *)&pkt->xt, target, info); target 78 net/netfilter/nft_compat.c ret = target->target(skb, &pkt->xt); target 98 net/netfilter/nft_compat.c struct xt_target *target = expr->ops->data; target 102 net/netfilter/nft_compat.c nft_compat_set_par((struct xt_action_param *)&pkt->xt, target, info); target 104 
net/netfilter/nft_compat.c ret = target->target(skb, &pkt->xt); target 137 net/netfilter/nft_compat.c struct xt_target *target, void *info, target 162 net/netfilter/nft_compat.c par->target = target; target 221 net/netfilter/nft_compat.c struct xt_target *target = expr->ops->data; target 229 net/netfilter/nft_compat.c target_compat_from_user(target, nla_data(tb[NFTA_TARGET_INFO]), info); target 237 net/netfilter/nft_compat.c nft_target_set_tgchk_param(&par, ctx, target, info, &e, proto, inv); target 244 net/netfilter/nft_compat.c if (!target->target) target 253 net/netfilter/nft_compat.c struct xt_target *target = expr->ops->data; target 255 net/netfilter/nft_compat.c struct module *me = target->me; target 259 net/netfilter/nft_compat.c par.target = target; target 262 net/netfilter/nft_compat.c if (par.target->destroy != NULL) target 263 net/netfilter/nft_compat.c par.target->destroy(&par); target 289 net/netfilter/nft_compat.c const struct xt_target *target = expr->ops->data; target 292 net/netfilter/nft_compat.c if (nla_put_string(skb, NFTA_TARGET_NAME, target->name) || target 293 net/netfilter/nft_compat.c nla_put_be32(skb, NFTA_TARGET_REV, htonl(target->revision)) || target 295 net/netfilter/nft_compat.c target->targetsize, target->usersize)) target 308 net/netfilter/nft_compat.c struct xt_target *target = expr->ops->data; target 318 net/netfilter/nft_compat.c if (target->hooks && !(hook_mask & target->hooks)) target 321 net/netfilter/nft_compat.c ret = nft_compat_chain_validate_dependency(ctx, target->table); target 572 net/netfilter/nft_compat.c int rev, int target) target 590 net/netfilter/nft_compat.c nla_put_be32(skb, NFTA_COMPAT_TYPE, htonl(target))) target 607 net/netfilter/nft_compat.c int ret = 0, target; target 621 net/netfilter/nft_compat.c target = ntohl(nla_get_be32(tb[NFTA_COMPAT_TYPE])); target 649 net/netfilter/nft_compat.c rev, target, &ret), target 666 net/netfilter/nft_compat.c name, ret, target) <= 0) { target 788 net/netfilter/nft_compat.c struct xt_target *target; target 807 net/netfilter/nft_compat.c target = xt_request_find_target(family, tg_name, rev); target 808 net/netfilter/nft_compat.c if (IS_ERR(target)) target 811 net/netfilter/nft_compat.c if (!target->target) { target 816 net/netfilter/nft_compat.c if (target->targetsize > nla_len(tb[NFTA_TARGET_INFO])) { target 828 net/netfilter/nft_compat.c ops->size = NFT_EXPR_SIZE(XT_ALIGN(target->targetsize)); target 833 net/netfilter/nft_compat.c ops->data = target; target 842 net/netfilter/nft_compat.c module_put(target->me); target 848 net/netfilter/nft_compat.c struct xt_target *target = ops->data; target 850 net/netfilter/nft_compat.c module_put(target->me); target 71 net/netfilter/nft_exthdr.c unsigned int *offset, int target) target 101 net/netfilter/nft_exthdr.c switch (target) { target 106 net/netfilter/nft_exthdr.c found = target == IPOPT_SSRR ? opt->is_strictroute : target 126 net/netfilter/nft_exthdr.c return found ? 
target : -ENOENT; target 49 net/netfilter/x_tables.c struct list_head target; target 69 net/netfilter/x_tables.c int xt_register_target(struct xt_target *target) target 71 net/netfilter/x_tables.c u_int8_t af = target->family; target 74 net/netfilter/x_tables.c list_add(&target->list, &xt[af].target); target 81 net/netfilter/x_tables.c xt_unregister_target(struct xt_target *target) target 83 net/netfilter/x_tables.c u_int8_t af = target->family; target 86 net/netfilter/x_tables.c list_del(&target->list); target 92 net/netfilter/x_tables.c xt_register_targets(struct xt_target *target, unsigned int n) target 98 net/netfilter/x_tables.c err = xt_register_target(&target[i]); target 106 net/netfilter/x_tables.c xt_unregister_targets(target, i); target 112 net/netfilter/x_tables.c xt_unregister_targets(struct xt_target *target, unsigned int n) target 115 net/netfilter/x_tables.c xt_unregister_target(&target[n]); target 235 net/netfilter/x_tables.c list_for_each_entry(t, &xt[af].target, list) { target 257 net/netfilter/x_tables.c struct xt_target *target; target 262 net/netfilter/x_tables.c target = xt_find_target(af, name, revision); target 263 net/netfilter/x_tables.c if (IS_ERR(target)) { target 265 net/netfilter/x_tables.c target = xt_find_target(af, name, revision); target 268 net/netfilter/x_tables.c return target; target 323 net/netfilter/x_tables.c return XT_OBJ_TO_USER(u, t, target, 0) || target 324 net/netfilter/x_tables.c XT_DATA_TO_USER(u, t, target); target 353 net/netfilter/x_tables.c list_for_each_entry(t, &xt[af].target, list) { target 369 net/netfilter/x_tables.c int xt_find_revision(u8 af, const char *name, u8 revision, int target, target 375 net/netfilter/x_tables.c if (target == 1) target 523 net/netfilter/x_tables.c static int xt_check_entry_match(const char *match, const char *target, target 527 net/netfilter/x_tables.c int length = target - match; target 962 net/netfilter/x_tables.c unsigned int target, unsigned int size) target 969 net/netfilter/x_tables.c if (offsets[m] > target) target 971 net/netfilter/x_tables.c else if (offsets[m] < target) target 986 net/netfilter/x_tables.c if (XT_ALIGN(par->target->targetsize) != size) { target 988 net/netfilter/x_tables.c xt_prefix[par->family], par->target->name, target 989 net/netfilter/x_tables.c par->target->revision, target 990 net/netfilter/x_tables.c XT_ALIGN(par->target->targetsize), size); target 993 net/netfilter/x_tables.c if (par->target->table != NULL && target 994 net/netfilter/x_tables.c strcmp(par->target->table, par->table) != 0) { target 996 net/netfilter/x_tables.c xt_prefix[par->family], par->target->name, target 997 net/netfilter/x_tables.c par->target->table, par->table); target 1000 net/netfilter/x_tables.c if (par->target->hooks && (par->hook_mask & ~par->target->hooks) != 0) { target 1004 net/netfilter/x_tables.c xt_prefix[par->family], par->target->name, target 1008 net/netfilter/x_tables.c par->target->hooks, target 1012 net/netfilter/x_tables.c if (par->target->proto && (par->target->proto != proto || inv_proto)) { target 1014 net/netfilter/x_tables.c xt_prefix[par->family], par->target->name, target 1015 net/netfilter/x_tables.c par->target->proto); target 1018 net/netfilter/x_tables.c if (par->target->checkentry != NULL) { target 1019 net/netfilter/x_tables.c ret = par->target->checkentry(par); target 1105 net/netfilter/x_tables.c int xt_compat_target_offset(const struct xt_target *target) target 1107 net/netfilter/x_tables.c u_int16_t csize = target->compatsize ? 
: target->targetsize; target 1108 net/netfilter/x_tables.c return XT_ALIGN(target->targetsize) - COMPAT_XT_ALIGN(csize); target 1115 net/netfilter/x_tables.c const struct xt_target *target = t->u.kernel.target; target 1117 net/netfilter/x_tables.c int pad, off = xt_compat_target_offset(target); target 1123 net/netfilter/x_tables.c if (target->compat_from_user) target 1124 net/netfilter/x_tables.c target->compat_from_user(t->data, ct->data); target 1127 net/netfilter/x_tables.c pad = XT_ALIGN(target->targetsize) - target->targetsize; target 1129 net/netfilter/x_tables.c memset(t->data + target->targetsize, 0, pad); target 1133 net/netfilter/x_tables.c strlcpy(name, target->name, sizeof(name)); target 1134 net/netfilter/x_tables.c module_put(target->me); target 1145 net/netfilter/x_tables.c const struct xt_target *target = t->u.kernel.target; target 1147 net/netfilter/x_tables.c int off = xt_compat_target_offset(target); target 1150 net/netfilter/x_tables.c if (XT_OBJ_TO_USER(ct, t, target, tsize)) target 1153 net/netfilter/x_tables.c if (target->compat_to_user) { target 1154 net/netfilter/x_tables.c if (target->compat_to_user((void __user *)ct->data, t->data)) target 1157 net/netfilter/x_tables.c if (COMPAT_XT_DATA_TO_USER(ct, t, target, tsize - sizeof(*ct))) target 1562 net/netfilter/x_tables.c &xt[NFPROTO_UNSPEC].target : &xt[NFPROTO_UNSPEC].match; target 1571 net/netfilter/x_tables.c &xt[nfproto].target : &xt[nfproto].match; target 1660 net/netfilter/x_tables.c const struct xt_target *target; target 1667 net/netfilter/x_tables.c target = list_entry(trav->curr, struct xt_target, list); target 1668 net/netfilter/x_tables.c if (*target->name) target 1669 net/netfilter/x_tables.c seq_printf(seq, "%s\n", target->name); target 1907 net/netfilter/x_tables.c INIT_LIST_HEAD(&xt[i].target); target 132 net/netfilter/xt_AUDIT.c .target = audit_tg, target 140 net/netfilter/xt_AUDIT.c .target = audit_tg_ebt, target 69 net/netfilter/xt_CHECKSUM.c .target = checksum_tg, target 44 net/netfilter/xt_CLASSIFY.c .target = classify_tg, target 53 net/netfilter/xt_CLASSIFY.c .target = classify_tg, target 123 net/netfilter/xt_CONNSECMARK.c .target = connsecmark_tg, target 313 net/netfilter/xt_CT.c .target = xt_ct_target_v0, target 325 net/netfilter/xt_CT.c .target = xt_ct_target_v1, target 337 net/netfilter/xt_CT.c .target = xt_ct_target_v1, target 370 net/netfilter/xt_CT.c .target = notrack_tg, target 116 net/netfilter/xt_DSCP.c .target = dscp_tg, target 125 net/netfilter/xt_DSCP.c .target = dscp_tg6, target 135 net/netfilter/xt_DSCP.c .target = tos_tg, target 144 net/netfilter/xt_DSCP.c .target = tos_tg6, target 128 net/netfilter/xt_HL.c .target = ttl_tg, target 138 net/netfilter/xt_HL.c .target = hl_tg6, target 344 net/netfilter/xt_HMARK.c .target = hmark_tg_v4, target 353 net/netfilter/xt_HMARK.c .target = hmark_tg_v6, target 253 net/netfilter/xt_IDLETIMER.c .target = idletimer_tg_target, target 183 net/netfilter/xt_LED.c .target = led_tg, target 73 net/netfilter/xt_LOG.c .target = log_tg, target 83 net/netfilter/xt_LOG.c .target = log_tg, target 77 net/netfilter/xt_MASQUERADE.c .target = masquerade_tg6, target 88 net/netfilter/xt_MASQUERADE.c .target = masquerade_tg, target 124 net/netfilter/xt_NETMAP.c .target = netmap_tg6, target 139 net/netfilter/xt_NETMAP.c .target = netmap_tg4, target 65 net/netfilter/xt_NFLOG.c .target = nflog_tg, target 77 net/netfilter/xt_NFQUEUE.c if (par->target->revision == 2 && info->flags > 1) target 79 net/netfilter/xt_NFQUEUE.c if (par->target->revision == 3 && info->flags & 
~NFQ_FLAG_MASK) target 114 net/netfilter/xt_NFQUEUE.c .target = nfqueue_tg, target 123 net/netfilter/xt_NFQUEUE.c .target = nfqueue_tg_v1, target 132 net/netfilter/xt_NFQUEUE.c .target = nfqueue_tg_v2, target 141 net/netfilter/xt_NFQUEUE.c .target = nfqueue_tg_v3, target 182 net/netfilter/xt_RATEEST.c .target = xt_rateest_tg, target 79 net/netfilter/xt_REDIRECT.c .target = redirect_tg6, target 90 net/netfilter/xt_REDIRECT.c .target = redirect_tg4, target 127 net/netfilter/xt_SECMARK.c .target = secmark_tg, target 316 net/netfilter/xt_TCPMSS.c .target = tcpmss_tg4, target 326 net/netfilter/xt_TCPMSS.c .target = tcpmss_tg6, target 118 net/netfilter/xt_TCPOPTSTRIP.c .target = tcpoptstrip_tg4, target 128 net/netfilter/xt_TCPOPTSTRIP.c .target = tcpoptstrip_tg6, target 151 net/netfilter/xt_TEE.c .target = tee_tg4, target 163 net/netfilter/xt_TEE.c .target = tee_tg6, target 227 net/netfilter/xt_TPROXY.c .target = tproxy_tg4_v0, target 238 net/netfilter/xt_TPROXY.c .target = tproxy_tg4_v1, target 250 net/netfilter/xt_TPROXY.c .target = tproxy_tg6_v1, target 37 net/netfilter/xt_TRACE.c .target = trace_tg, target 154 net/netfilter/xt_connmark.c .target = connmark_tg, target 164 net/netfilter/xt_connmark.c .target = connmark_tg_v2, target 46 net/netfilter/xt_mark.c .target = mark_tg, target 156 net/netfilter/xt_nat.c .target = xt_snat_target_v0, target 169 net/netfilter/xt_nat.c .target = xt_dnat_target_v0, target 182 net/netfilter/xt_nat.c .target = xt_snat_target_v1, target 194 net/netfilter/xt_nat.c .target = xt_dnat_target_v1, target 206 net/netfilter/xt_nat.c .target = xt_snat_target_v2, target 218 net/netfilter/xt_nat.c .target = xt_dnat_target_v2, target 621 net/netfilter/xt_set.c .target = set_target_v0, target 631 net/netfilter/xt_set.c .target = set_target_v1, target 641 net/netfilter/xt_set.c .target = set_target_v1, target 652 net/netfilter/xt_set.c .target = set_target_v2, target 662 net/netfilter/xt_set.c .target = set_target_v2, target 673 net/netfilter/xt_set.c .target = set_target_v3, target 683 net/netfilter/xt_set.c .target = set_target_v3, target 284 net/nfc/core.c struct nfc_target *target; target 309 net/nfc/core.c target = nfc_find_target(dev, target_index); target 310 net/nfc/core.c if (target == NULL) { target 315 net/nfc/core.c rc = dev->ops->dep_link_up(dev, target, comm_mode, gb, gb_len); target 317 net/nfc/core.c dev->active_target = target; target 368 net/nfc/core.c struct nfc_target *target; target 370 net/nfc/core.c target = nfc_find_target(dev, target_idx); target 371 net/nfc/core.c if (target == NULL) target 374 net/nfc/core.c dev->active_target = target; target 396 net/nfc/core.c struct nfc_target *target; target 413 net/nfc/core.c target = nfc_find_target(dev, target_idx); target 414 net/nfc/core.c if (target == NULL) { target 419 net/nfc/core.c rc = dev->ops->activate_target(dev, target, protocol); target 421 net/nfc/core.c dev->active_target = target; target 76 net/nfc/digital.h struct nfc_target *target, u8 protocol); target 81 net/nfc/digital.h struct nfc_target *target, __u8 comm_mode, __u8 *gb, target 84 net/nfc/digital.h struct nfc_target *target, struct sk_buff *skb, target 305 net/nfc/digital_core.c struct nfc_target *target, u8 protocol) target 386 net/nfc/digital_core.c target->supported_protocols = (1 << protocol); target 391 net/nfc/digital_core.c rc = nfc_targets_found(ddev->nfc_dev, target, 1); target 588 net/nfc/digital_core.c struct nfc_target *target, target 594 net/nfc/digital_core.c rc = digital_in_send_atr_req(ddev, target, comm_mode, gb, 
gb_len); target 614 net/nfc/digital_core.c struct nfc_target *target, __u32 protocol) target 634 net/nfc/digital_core.c struct nfc_target *target, target 693 net/nfc/digital_core.c static int digital_in_send(struct nfc_dev *nfc_dev, struct nfc_target *target, target 709 net/nfc/digital_core.c rc = digital_in_send_dep_req(ddev, target, skb, data_exch); target 301 net/nfc/digital_dep.c struct nfc_target *target = arg; target 350 net/nfc/digital_dep.c nfc_dep_link_is_up(ddev->nfc_dev, target->idx, NFC_COMM_ACTIVE, target 363 net/nfc/digital_dep.c struct nfc_target *target) target 395 net/nfc/digital_dep.c digital_in_recv_psl_res, target); target 405 net/nfc/digital_dep.c struct nfc_target *target = arg; target 457 net/nfc/digital_dep.c rc = digital_in_send_psl_req(ddev, target); target 462 net/nfc/digital_dep.c rc = nfc_dep_link_is_up(ddev->nfc_dev, target->idx, NFC_COMM_ACTIVE, target 475 net/nfc/digital_dep.c struct nfc_target *target, __u8 comm_mode, __u8 *gb, target 502 net/nfc/digital_dep.c if (target->nfcid2_len) target 503 net/nfc/digital_dep.c memcpy(atr_req->nfcid3, target->nfcid2, NFC_NFCID2_MAXSIZE); target 525 net/nfc/digital_dep.c digital_in_recv_atr_res, target); target 908 net/nfc/digital_dep.c struct nfc_target *target, struct sk_buff *skb, target 165 net/nfc/digital_technology.c struct nfc_target *target); target 218 net/nfc/digital_technology.c struct nfc_target *target = arg; target 241 net/nfc/digital_technology.c rc = digital_target_found(ddev, target, NFC_PROTO_ISO14443); target 245 net/nfc/digital_technology.c kfree(target); target 252 net/nfc/digital_technology.c struct nfc_target *target) target 265 net/nfc/digital_technology.c target); target 275 net/nfc/digital_technology.c struct nfc_target *target = arg; target 302 net/nfc/digital_technology.c rc = digital_in_send_sdd_req(ddev, target); target 309 net/nfc/digital_technology.c target->sel_res = sel_res; target 316 net/nfc/digital_technology.c rc = digital_in_send_rats(ddev, target); target 329 net/nfc/digital_technology.c rc = digital_target_found(ddev, target, nfc_proto); target 332 net/nfc/digital_technology.c kfree(target); target 342 net/nfc/digital_technology.c struct nfc_target *target, target 357 net/nfc/digital_technology.c if (target->nfcid1_len <= 4) target 359 net/nfc/digital_technology.c else if (target->nfcid1_len < 10) target 379 net/nfc/digital_technology.c target); target 390 net/nfc/digital_technology.c struct nfc_target *target = arg; target 427 net/nfc/digital_technology.c memcpy(target->nfcid1 + target->nfcid1_len, sdd_res->nfcid1 + offset, target 429 net/nfc/digital_technology.c target->nfcid1_len += size; target 431 net/nfc/digital_technology.c rc = digital_in_send_sel_req(ddev, target, sdd_res); target 437 net/nfc/digital_technology.c kfree(target); target 443 net/nfc/digital_technology.c struct nfc_target *target) target 458 net/nfc/digital_technology.c if (target->nfcid1_len == 0) target 460 net/nfc/digital_technology.c else if (target->nfcid1_len == 3) target 469 net/nfc/digital_technology.c target); target 475 net/nfc/digital_technology.c struct nfc_target *target = NULL; target 489 net/nfc/digital_technology.c target = kzalloc(sizeof(struct nfc_target), GFP_KERNEL); target 490 net/nfc/digital_technology.c if (!target) { target 495 net/nfc/digital_technology.c target->sens_res = __le16_to_cpu(*(__le16 *)resp->data); target 497 net/nfc/digital_technology.c if (!DIGITAL_SENS_RES_IS_VALID(target->sens_res)) { target 503 net/nfc/digital_technology.c if 
(DIGITAL_SENS_RES_IS_T1T(target->sens_res)) target 504 net/nfc/digital_technology.c rc = digital_target_found(ddev, target, NFC_PROTO_JEWEL); target 506 net/nfc/digital_technology.c rc = digital_in_send_sdd_req(ddev, target); target 512 net/nfc/digital_technology.c kfree(target); target 574 net/nfc/digital_technology.c struct nfc_target *target = arg; target 598 net/nfc/digital_technology.c rc = digital_target_found(ddev, target, NFC_PROTO_ISO14443_B); target 602 net/nfc/digital_technology.c kfree(target); target 609 net/nfc/digital_technology.c struct nfc_target *target, target 634 net/nfc/digital_technology.c target); target 644 net/nfc/digital_technology.c struct nfc_target *target = NULL; target 687 net/nfc/digital_technology.c target = kzalloc(sizeof(struct nfc_target), GFP_KERNEL); target 688 net/nfc/digital_technology.c if (!target) { target 693 net/nfc/digital_technology.c rc = digital_in_send_attrib_req(ddev, target, sensb_res); target 699 net/nfc/digital_technology.c kfree(target); target 743 net/nfc/digital_technology.c struct nfc_target target; target 767 net/nfc/digital_technology.c memset(&target, 0, sizeof(struct nfc_target)); target 771 net/nfc/digital_technology.c memcpy(target.sensf_res, sensf_res, resp->len); target 772 net/nfc/digital_technology.c target.sensf_res_len = resp->len; target 774 net/nfc/digital_technology.c memcpy(target.nfcid2, sensf_res->nfcid2, NFC_NFCID2_MAXSIZE); target 775 net/nfc/digital_technology.c target.nfcid2_len = NFC_NFCID2_MAXSIZE; target 777 net/nfc/digital_technology.c if (target.nfcid2[0] == DIGITAL_SENSF_NFCID2_NFC_DEP_B1 && target 778 net/nfc/digital_technology.c target.nfcid2[1] == DIGITAL_SENSF_NFCID2_NFC_DEP_B2) target 783 net/nfc/digital_technology.c rc = digital_target_found(ddev, &target, proto); target 840 net/nfc/digital_technology.c struct nfc_target *target = NULL; target 862 net/nfc/digital_technology.c target = kzalloc(sizeof(*target), GFP_KERNEL); target 863 net/nfc/digital_technology.c if (!target) { target 868 net/nfc/digital_technology.c target->is_iso15693 = 1; target 869 net/nfc/digital_technology.c target->iso15693_dsfid = res->dsfid; target 870 net/nfc/digital_technology.c memcpy(target->iso15693_uid, &res->uid, sizeof(target->iso15693_uid)); target 872 net/nfc/digital_technology.c rc = digital_target_found(ddev, target, NFC_PROTO_ISO15693); target 874 net/nfc/digital_technology.c kfree(target); target 663 net/nfc/hci/core.c static int hci_dep_link_up(struct nfc_dev *nfc_dev, struct nfc_target *target, target 671 net/nfc/hci/core.c return hdev->ops->dep_link_up(hdev, target, comm_mode, target 686 net/nfc/hci/core.c struct nfc_target *target, u32 protocol) target 692 net/nfc/hci/core.c struct nfc_target *target, target 722 net/nfc/hci/core.c static int hci_transceive(struct nfc_dev *nfc_dev, struct nfc_target *target, target 729 net/nfc/hci/core.c pr_debug("target_idx=%d\n", target->idx); target 731 net/nfc/hci/core.c switch (target->hci_reader_gate) { target 735 net/nfc/hci/core.c r = hdev->ops->im_transceive(hdev, target, skb, cb, target 747 net/nfc/hci/core.c r = nfc_hci_send_cmd_async(hdev, target->hci_reader_gate, target 753 net/nfc/hci/core.c r = hdev->ops->im_transceive(hdev, target, skb, cb, target 781 net/nfc/hci/core.c struct nfc_target *target) target 788 net/nfc/hci/core.c return hdev->ops->check_presence(hdev, target); target 848 net/nfc/nci/core.c struct nfc_target *target, __u32 protocol) target 856 net/nfc/nci/core.c pr_debug("target_idx %d, protocol 0x%x\n", target->idx, protocol); target 870 
net/nfc/nci/core.c if (ndev->targets[i].idx == target->idx) { target 914 net/nfc/nci/core.c struct nfc_target *target, target 941 net/nfc/nci/core.c static int nci_dep_link_up(struct nfc_dev *nfc_dev, struct nfc_target *target, target 947 net/nfc/nci/core.c pr_debug("target_idx %d, comm_mode %d\n", target->idx, comm_mode); target 949 net/nfc/nci/core.c rc = nci_activate_target(nfc_dev, target, NFC_PROTO_NFC_DEP); target 956 net/nfc/nci/core.c rc = nfc_dep_link_is_up(nfc_dev, target->idx, NFC_COMM_PASSIVE, target 987 net/nfc/nci/core.c static int nci_transceive(struct nfc_dev *nfc_dev, struct nfc_target *target, target 999 net/nfc/nci/core.c pr_debug("target_idx %d, len %d\n", target->idx, skb->len); target 181 net/nfc/nci/ntf.c struct nfc_target *target, target 218 net/nfc/nci/ntf.c target->sens_res = nfca_poll->sens_res; target 219 net/nfc/nci/ntf.c target->sel_res = nfca_poll->sel_res; target 220 net/nfc/nci/ntf.c target->nfcid1_len = nfca_poll->nfcid1_len; target 221 net/nfc/nci/ntf.c if (target->nfcid1_len > 0) { target 222 net/nfc/nci/ntf.c memcpy(target->nfcid1, nfca_poll->nfcid1, target 223 net/nfc/nci/ntf.c target->nfcid1_len); target 228 net/nfc/nci/ntf.c target->sensb_res_len = nfcb_poll->sensb_res_len; target 229 net/nfc/nci/ntf.c if (target->sensb_res_len > 0) { target 230 net/nfc/nci/ntf.c memcpy(target->sensb_res, nfcb_poll->sensb_res, target 231 net/nfc/nci/ntf.c target->sensb_res_len); target 236 net/nfc/nci/ntf.c target->sensf_res_len = nfcf_poll->sensf_res_len; target 237 net/nfc/nci/ntf.c if (target->sensf_res_len > 0) { target 238 net/nfc/nci/ntf.c memcpy(target->sensf_res, nfcf_poll->sensf_res, target 239 net/nfc/nci/ntf.c target->sensf_res_len); target 244 net/nfc/nci/ntf.c target->is_iso15693 = 1; target 245 net/nfc/nci/ntf.c target->iso15693_dsfid = nfcv_poll->dsfid; target 246 net/nfc/nci/ntf.c memcpy(target->iso15693_uid, nfcv_poll->uid, NFC_ISO15693_UID_MAXSIZE); target 252 net/nfc/nci/ntf.c target->supported_protocols |= protocol; target 262 net/nfc/nci/ntf.c struct nfc_target *target; target 266 net/nfc/nci/ntf.c target = &ndev->targets[i]; target 267 net/nfc/nci/ntf.c if (target->logical_idx == ntf->rf_discovery_id) { target 269 net/nfc/nci/ntf.c nci_add_new_protocol(ndev, target, ntf->rf_protocol, target 282 net/nfc/nci/ntf.c target = &ndev->targets[ndev->n_targets]; target 284 net/nfc/nci/ntf.c rc = nci_add_new_protocol(ndev, target, ntf->rf_protocol, target 288 net/nfc/nci/ntf.c target->logical_idx = ntf->rf_discovery_id; target 291 net/nfc/nci/ntf.c pr_debug("logical idx %d, n_targets %d\n", target->logical_idx, target 442 net/nfc/nci/ntf.c struct nfc_target *target; target 445 net/nfc/nci/ntf.c target = &ndev->targets[ndev->n_targets]; target 447 net/nfc/nci/ntf.c rc = nci_add_new_protocol(ndev, target, ntf->rf_protocol, target 453 net/nfc/nci/ntf.c target->logical_idx = ntf->rf_discovery_id; target 457 net/nfc/nci/ntf.c target->logical_idx, ndev->n_targets); target 61 net/nfc/netlink.c static int nfc_genl_send_target(struct sk_buff *msg, struct nfc_target *target, target 73 net/nfc/netlink.c if (nla_put_u32(msg, NFC_ATTR_TARGET_INDEX, target->idx) || target 74 net/nfc/netlink.c nla_put_u32(msg, NFC_ATTR_PROTOCOLS, target->supported_protocols) || target 75 net/nfc/netlink.c nla_put_u16(msg, NFC_ATTR_TARGET_SENS_RES, target->sens_res) || target 76 net/nfc/netlink.c nla_put_u8(msg, NFC_ATTR_TARGET_SEL_RES, target->sel_res)) target 78 net/nfc/netlink.c if (target->nfcid1_len > 0 && target 79 net/nfc/netlink.c nla_put(msg, NFC_ATTR_TARGET_NFCID1, 
target->nfcid1_len, target 80 net/nfc/netlink.c target->nfcid1)) target 82 net/nfc/netlink.c if (target->sensb_res_len > 0 && target 83 net/nfc/netlink.c nla_put(msg, NFC_ATTR_TARGET_SENSB_RES, target->sensb_res_len, target 84 net/nfc/netlink.c target->sensb_res)) target 86 net/nfc/netlink.c if (target->sensf_res_len > 0 && target 87 net/nfc/netlink.c nla_put(msg, NFC_ATTR_TARGET_SENSF_RES, target->sensf_res_len, target 88 net/nfc/netlink.c target->sensf_res)) target 91 net/nfc/netlink.c if (target->is_iso15693) { target 93 net/nfc/netlink.c target->iso15693_dsfid) || target 95 net/nfc/netlink.c sizeof(target->iso15693_uid), target->iso15693_uid)) target 434 net/openvswitch/flow.c key->ipv6.nd.target = nd->target; target 472 net/openvswitch/flow.c memset(&key->ipv6.nd.target, 0, sizeof(key->ipv6.nd.target)); target 132 net/openvswitch/flow.h struct in6_addr target; /* ND target address. */ target 1699 net/openvswitch/flow_netlink.c SW_FLOW_KEY_MEMCPY(match, ipv6.nd.target, target 1701 net/openvswitch/flow_netlink.c sizeof(match->key->ipv6.nd.target), target 2190 net/openvswitch/flow_netlink.c memcpy(nd_key->nd_target, &output->ipv6.nd.target, target 225 net/phonet/af_phonet.c const struct sockaddr_pn *target) target 235 net/phonet/af_phonet.c if (target != NULL) { target 236 net/phonet/af_phonet.c dst = pn_sockaddr_get_object(target); target 237 net/phonet/af_phonet.c res = pn_sockaddr_get_resource(target); target 75 net/phonet/datagram.c DECLARE_SOCKADDR(struct sockaddr_pn *, target, msg->msg_name); target 83 net/phonet/datagram.c if (target == NULL) target 89 net/phonet/datagram.c if (target->spn_family != AF_PHONET) target 108 net/phonet/datagram.c err = pn_skb_send(sk, skb, target); target 37 net/sched/act_ipt.c struct xt_target *target; target 41 net/sched/act_ipt.c target = xt_request_find_target(AF_INET, t->u.user.name, target 43 net/sched/act_ipt.c if (IS_ERR(target)) target 44 net/sched/act_ipt.c return PTR_ERR(target); target 46 net/sched/act_ipt.c t->u.kernel.target = target; target 51 net/sched/act_ipt.c par.target = target; target 58 net/sched/act_ipt.c module_put(t->u.kernel.target->me); target 67 net/sched/act_ipt.c .target = t->u.kernel.target, target 72 net/sched/act_ipt.c if (par.target->destroy != NULL) target 73 net/sched/act_ipt.c par.target->destroy(&par); target 74 net/sched/act_ipt.c module_put(par.target->me); target 249 net/sched/act_ipt.c par.target = ipt->tcfi_t->u.kernel.target; target 251 net/sched/act_ipt.c ret = par.target->target(skb, &par); target 296 net/sched/act_ipt.c strcpy(t->u.user.name, ipt->tcfi_t->u.kernel.target->name); target 92 net/sched/sch_cake.c u64 target; target 227 net/sched/sch_cake.c u64 target; target 455 net/sched/sch_cake.c if (ktime_to_ns(ktime_sub(now, vars->blue_timer)) > p->target) { target 480 net/sched/sch_cake.c ktime_to_ns(ktime_sub(now, vars->blue_timer)) > p->target) { target 531 net/sched/sch_cake.c over_target = sojourn > p->target && target 2148 net/sched/sch_cake.c q->tins[i].cparams.target); target 2221 net/sched/sch_cake.c b->cparams.target = max((byte_target_ns * 3) / 2, target_ns); target 2223 net/sched/sch_cake.c b->cparams.target - target_ns, target 2224 net/sched/sch_cake.c b->cparams.target * 2); target 2243 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2267 net/sched/sch_cake.c cake_set_rate(b, rate, mtu, us_to_ns(q->target), target 2364 net/sched/sch_cake.c cake_set_rate(b, rate, mtu, us_to_ns(q->target), target 2409 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); 
target 2411 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2413 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2415 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2452 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2454 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2456 net/sched/sch_cake.c us_to_ns(q->target), us_to_ns(q->interval)); target 2603 net/sched/sch_cake.c q->target = nla_get_u32(tb[TCA_CAKE_TARGET]); target 2605 net/sched/sch_cake.c if (!q->target) target 2606 net/sched/sch_cake.c q->target = 1; target 2672 net/sched/sch_cake.c q->target = 5000; /* 5ms: codel RFC argues target 2755 net/sched/sch_cake.c if (nla_put_u32(skb, TCA_CAKE_TARGET, q->target)) target 2867 net/sched/sch_cake.c ktime_to_us(ns_to_ktime(b->cparams.target))); target 152 net/sched/sch_codel.c u32 target = nla_get_u32(tb[TCA_CODEL_TARGET]); target 154 net/sched/sch_codel.c q->params.target = ((u64)target * NSEC_PER_USEC) >> CODEL_SHIFT; target 226 net/sched/sch_codel.c codel_time_to_us(q->params.target)) || target 393 net/sched/sch_fq_codel.c u64 target = nla_get_u32(tb[TCA_FQ_CODEL_TARGET]); target 395 net/sched/sch_fq_codel.c q->cparams.target = (target * NSEC_PER_USEC) >> CODEL_SHIFT; target 523 net/sched/sch_fq_codel.c codel_time_to_us(q->cparams.target)) || target 30 net/sched/sch_pie.c psched_time_t target; /* user specified target delay in pschedtime */ target 77 net/sched/sch_pie.c params->target = PSCHED_NS2TICKS(15 * NSEC_PER_MSEC); /* 15 ms */ target 106 net/sched/sch_pie.c if ((q->vars.qdelay < q->params.target / 2) && target 220 net/sched/sch_pie.c u32 target = nla_get_u32(tb[TCA_PIE_TARGET]); target 223 net/sched/sch_pie.c q->params.target = PSCHED_NS2TICKS((u64)target * NSEC_PER_USEC); target 382 net/sched/sch_pie.c delta += alpha * (u64)(qdelay - q->params.target); target 436 net/sched/sch_pie.c if ((q->vars.qdelay < q->params.target / 2) && target 437 net/sched/sch_pie.c (q->vars.qdelay_old < q->params.target / 2) && target 492 net/sched/sch_pie.c ((u32)PSCHED_TICKS2NS(q->params.target)) / target 1060 net/sctp/sm_make_chunk.c void *target; target 1062 net/sctp/sm_make_chunk.c target = skb_put(chunk->skb, len); target 1065 net/sctp/sm_make_chunk.c memcpy(target, data, len); target 1067 net/sctp/sm_make_chunk.c memset(target, 0, len); target 1073 net/sctp/sm_make_chunk.c return target; target 1495 net/sctp/sm_make_chunk.c void *target; target 1498 net/sctp/sm_make_chunk.c target = skb_put_data(chunk->skb, data, len); target 1504 net/sctp/sm_make_chunk.c return target; target 1514 net/sctp/sm_make_chunk.c void *target; target 1517 net/sctp/sm_make_chunk.c target = skb_put(chunk->skb, len); target 1520 net/sctp/sm_make_chunk.c if (!copy_from_iter_full(target, len, from)) target 296 net/smc/smc_rx.c int target; /* Read at least these many bytes */ target 308 net/smc/smc_rx.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 314 net/smc/smc_rx.c if (read_done >= target || (pipe && read_done)) target 1736 net/tls/tls_sw.c int target, err = 0; target 1764 net/tls/tls_sw.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 1768 net/tls/tls_sw.c while (len && (decrypted + copied < target || ctx->recv_pkt)) { target 2215 net/unix/af_unix.c int target; target 2232 net/unix/af_unix.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, size); target 2259 net/unix/af_unix.c if (copied >= target) target 1662 net/vmw_vsock/af_vsock.c size_t target; target 1717 
net/vmw_vsock/af_vsock.c target = sock_rcvlowat(sk, flags & MSG_WAITALL, len); target 1718 net/vmw_vsock/af_vsock.c if (target >= transport->stream_rcvhiwat(vsk)) { target 1725 net/vmw_vsock/af_vsock.c err = transport->notify_recv_init(vsk, target, &recv_data); target 1751 net/vmw_vsock/af_vsock.c vsk, target, &recv_data); target 1785 net/vmw_vsock/af_vsock.c vsk, target, &recv_data); target 1800 net/vmw_vsock/af_vsock.c vsk, target, read, target 1805 net/vmw_vsock/af_vsock.c if (read >= target || flags & MSG_PEEK) target 1808 net/vmw_vsock/af_vsock.c target -= read; target 723 net/vmw_vsock/hyperv_transport.c int hvs_notify_poll_in(struct vsock_sock *vsk, size_t target, bool *readable) target 732 net/vmw_vsock/hyperv_transport.c int hvs_notify_poll_out(struct vsock_sock *vsk, size_t target, bool *writable) target 740 net/vmw_vsock/hyperv_transport.c int hvs_notify_recv_init(struct vsock_sock *vsk, size_t target, target 747 net/vmw_vsock/hyperv_transport.c int hvs_notify_recv_pre_block(struct vsock_sock *vsk, size_t target, target 754 net/vmw_vsock/hyperv_transport.c int hvs_notify_recv_pre_dequeue(struct vsock_sock *vsk, size_t target, target 761 net/vmw_vsock/hyperv_transport.c int hvs_notify_recv_post_dequeue(struct vsock_sock *vsk, size_t target, target 497 net/vmw_vsock/virtio_transport_common.c size_t target, target 511 net/vmw_vsock/virtio_transport_common.c size_t target, target 527 net/vmw_vsock/virtio_transport_common.c size_t target, struct vsock_transport_recv_notify_data *data) target 534 net/vmw_vsock/virtio_transport_common.c size_t target, struct vsock_transport_recv_notify_data *data) target 541 net/vmw_vsock/virtio_transport_common.c size_t target, struct vsock_transport_recv_notify_data *data) target 548 net/vmw_vsock/virtio_transport_common.c size_t target, ssize_t copied, bool data_read, target 1926 net/vmw_vsock/vmci_transport.c size_t target, target 1930 net/vmw_vsock/vmci_transport.c &vsk->sk, target, data_ready_now); target 1935 net/vmw_vsock/vmci_transport.c size_t target, target 1939 net/vmw_vsock/vmci_transport.c &vsk->sk, target, space_available_now); target 1944 net/vmw_vsock/vmci_transport.c size_t target, target 1948 net/vmw_vsock/vmci_transport.c &vsk->sk, target, target 1954 net/vmw_vsock/vmci_transport.c size_t target, target 1958 net/vmw_vsock/vmci_transport.c &vsk->sk, target, target 1964 net/vmw_vsock/vmci_transport.c size_t target, target 1968 net/vmw_vsock/vmci_transport.c &vsk->sk, target, target 1974 net/vmw_vsock/vmci_transport.c size_t target, target 1980 net/vmw_vsock/vmci_transport.c &vsk->sk, target, copied, data_read, target 339 net/vmw_vsock/vmci_transport_notify.c size_t target, bool *data_ready_now) target 363 net/vmw_vsock/vmci_transport_notify.c size_t target, bool *space_avail_now) target 393 net/vmw_vsock/vmci_transport_notify.c size_t target, target 404 net/vmw_vsock/vmci_transport_notify.c if (PKT_FIELD(vsk, write_notify_min_window) < target + 1) { target 405 net/vmw_vsock/vmci_transport_notify.c PKT_FIELD(vsk, write_notify_min_window) = target + 1; target 429 net/vmw_vsock/vmci_transport_notify.c size_t target, target 435 net/vmw_vsock/vmci_transport_notify.c if (!send_waiting_read(sk, target)) { target 455 net/vmw_vsock/vmci_transport_notify.c size_t target, target 475 net/vmw_vsock/vmci_transport_notify.c size_t target, target 42 net/vmw_vsock/vmci_transport_notify.h int (*poll_in) (struct sock *sk, size_t target, target 44 net/vmw_vsock/vmci_transport_notify.h int (*poll_out) (struct sock *sk, size_t target, target 51 
net/vmw_vsock/vmci_transport_notify.h int (*recv_init) (struct sock *sk, size_t target, target 53 net/vmw_vsock/vmci_transport_notify.h int (*recv_pre_block) (struct sock *sk, size_t target, target 55 net/vmw_vsock/vmci_transport_notify.h int (*recv_pre_dequeue) (struct sock *sk, size_t target, target 57 net/vmw_vsock/vmci_transport_notify.h int (*recv_post_dequeue) (struct sock *sk, size_t target, target 160 net/vmw_vsock/vmci_transport_notify_qstate.c size_t target, bool *data_ready_now) target 181 net/vmw_vsock/vmci_transport_notify_qstate.c size_t target, bool *space_avail_now) target 203 net/vmw_vsock/vmci_transport_notify_qstate.c size_t target, target 212 net/vmw_vsock/vmci_transport_notify_qstate.c if (PKT_FIELD(vsk, write_notify_min_window) < target + 1) { target 213 net/vmw_vsock/vmci_transport_notify_qstate.c PKT_FIELD(vsk, write_notify_min_window) = target + 1; target 235 net/vmw_vsock/vmci_transport_notify_qstate.c size_t target, target 255 net/vmw_vsock/vmci_transport_notify_qstate.c size_t target, target 391 net/vmw_vsock/vmci_transport_notify_qstate.c size_t target, target 20 samples/bpf/offwaketime_kern.c char target[TASK_COMM_LEN]; target 83 samples/bpf/offwaketime_kern.c bpf_get_current_comm(&key.target, sizeof(key.target)); target 43 samples/bpf/offwaketime_user.c char target[TASK_COMM_LEN]; target 54 samples/bpf/offwaketime_user.c printf("%s;", key->target); target 107 samples/seccomp/user-trap.c char path[PATH_MAX], source[PATH_MAX], target[PATH_MAX]; target 171 samples/seccomp/user-trap.c ret = read(mem, target, sizeof(target)); target 182 samples/seccomp/user-trap.c if (!strncmp(source, "/tmp/", 5) && !strncmp(target, "/tmp/", 5)) { target 183 samples/seccomp/user-trap.c if (mount(source, target, NULL, req->data.args[3], NULL) < 0) { target 311 scripts/basic/fixdep.c static void parse_dep_file(char *m, const char *target) target 359 scripts/basic/fixdep.c target, m); target 360 scripts/basic/fixdep.c xprintf("deps_%s := \\\n", target); target 387 scripts/basic/fixdep.c xprintf("\n%s: $(deps_%s)\n\n", target, target); target 388 scripts/basic/fixdep.c xprintf("$(deps_%s):\n", target); target 393 scripts/basic/fixdep.c const char *depfile, *target, *cmdline; target 400 scripts/basic/fixdep.c target = argv[2]; target 403 scripts/basic/fixdep.c xprintf("cmd_%s := %s\n\n", target, cmdline); target 406 scripts/basic/fixdep.c parse_dep_file(buf, target); target 175 scripts/dtc/dtc-parser.y struct node *target = get_node_by_ref($1, $3); target 177 scripts/dtc/dtc-parser.y if (target) { target 178 scripts/dtc/dtc-parser.y add_label(&target->labels, $2); target 179 scripts/dtc/dtc-parser.y merge_nodes(target, $4); target 194 scripts/dtc/dtc-parser.y struct node *target = get_node_by_ref($1, $2); target 196 scripts/dtc/dtc-parser.y if (target) target 197 scripts/dtc/dtc-parser.y merge_nodes(target, $3); target 205 scripts/dtc/dtc-parser.y struct node *target = get_node_by_ref($1, $2); target 207 scripts/dtc/dtc-parser.y if (target) { target 208 scripts/dtc/dtc-parser.y merge_nodes(target, $3); target 224 scripts/dtc/dtc-parser.y struct node *target = get_node_by_ref($1, $3); target 226 scripts/dtc/dtc-parser.y if (target) target 227 scripts/dtc/dtc-parser.y delete_node(target); target 236 scripts/dtc/dtc-parser.y struct node *target = get_node_by_ref($1, $3); target 238 scripts/dtc/dtc-parser.y if (target) target 239 scripts/dtc/dtc-parser.y omit_node_if_unused(target); target 556 scripts/dtc/libfdt/fdt_overlay.c static int overlay_apply_node(void *fdt, int target, target 575 
scripts/dtc/libfdt/fdt_overlay.c ret = fdt_setprop(fdt, target, name, prop, prop_len); target 585 scripts/dtc/libfdt/fdt_overlay.c nnode = fdt_add_subnode(fdt, target, name); target 587 scripts/dtc/libfdt/fdt_overlay.c nnode = fdt_subnode_offset(fdt, target, name); target 624 scripts/dtc/libfdt/fdt_overlay.c int target; target 638 scripts/dtc/libfdt/fdt_overlay.c target = overlay_get_target(fdt, fdto, fragment, NULL); target 639 scripts/dtc/libfdt/fdt_overlay.c if (target < 0) target 640 scripts/dtc/libfdt/fdt_overlay.c return target; target 642 scripts/dtc/libfdt/fdt_overlay.c ret = overlay_apply_node(fdt, target, fdto, overlay); target 696 scripts/dtc/libfdt/fdt_overlay.c int root_sym, ov_sym, prop, path_len, fragment, target; target 774 scripts/dtc/libfdt/fdt_overlay.c target = ret; target 778 scripts/dtc/libfdt/fdt_overlay.c ret = get_path_len(fdt, target); target 796 scripts/dtc/libfdt/fdt_overlay.c target = ret; target 802 scripts/dtc/libfdt/fdt_overlay.c ret = fdt_get_path(fdt, target, buf, len + 1); target 281 scripts/kconfig/expr.h struct menu *target; target 378 scripts/kconfig/mconf.c data->targets[k] = pos->target; target 727 scripts/kconfig/menu.c jump->target = prop->menu; target 729 scripts/kconfig/menu.c jump->target = location; target 75 security/apparmor/apparmorfs.c static int mangle_name(const char *name, char *target) target 77 security/apparmor/apparmorfs.c char *t = target; target 82 security/apparmor/apparmorfs.c if (target) { target 104 security/apparmor/apparmorfs.c return t - target; target 326 security/apparmor/apparmorfs.c const char *target, target 333 security/apparmor/apparmorfs.c if (target) { target 1525 security/apparmor/apparmorfs.c char *target; target 1534 security/apparmor/apparmorfs.c target = gen_symlink_name(depth, profile->rawdata->name, name); target 1537 security/apparmor/apparmorfs.c if (IS_ERR(target)) target 1538 security/apparmor/apparmorfs.c return target; target 1540 security/apparmor/apparmorfs.c set_delayed_call(done, rawdata_link_cb, target); target 1542 security/apparmor/apparmorfs.c return target; target 287 security/apparmor/domain.c struct aa_label *target, bool stack, target 298 security/apparmor/domain.c return label_match(profile, target, stack, start, true, request, perms); target 625 security/apparmor/domain.c const char *info = NULL, *name = NULL, *target = NULL; target 663 security/apparmor/domain.c new = x_to_label(profile, bprm, name, perms.xindex, &target, target 733 security/apparmor/domain.c aa_audit_file(profile, &perms, OP_EXEC, MAY_EXEC, name, target, new, target 1170 security/apparmor/domain.c struct aa_label *label, *previous, *new = NULL, *target = NULL; target 1226 security/apparmor/domain.c target = new; target 1247 security/apparmor/domain.c target = previous; target 1271 security/apparmor/domain.c AA_MAY_CHANGEHAT, NULL, NULL, target, target 1280 security/apparmor/domain.c struct aa_label *target, bool stack, target 1287 security/apparmor/domain.c error = change_profile_perms(profile, target, stack, request, target 1291 security/apparmor/domain.c NULL, target, GLOBAL_ROOT_UID, info, target 1313 security/apparmor/domain.c struct aa_label *label, *new = NULL, *target = NULL; target 1361 security/apparmor/domain.c target = aa_label_parse(label, fqname, GFP_KERNEL, true, false); target 1362 security/apparmor/domain.c if (IS_ERR(target)) { target 1366 security/apparmor/domain.c error = PTR_ERR(target); target 1367 security/apparmor/domain.c target = NULL; target 1383 security/apparmor/domain.c target = 
&tprofile->label; target 1397 security/apparmor/domain.c profile, target, stack, target 1407 security/apparmor/domain.c error = may_change_ptraced_domain(target, &info); target 1425 security/apparmor/domain.c aa_get_label(target), target 1443 security/apparmor/domain.c new = aa_label_merge(label, target, GFP_KERNEL); target 1462 security/apparmor/domain.c error = aa_set_current_onexec(target, stack); target 1468 security/apparmor/domain.c NULL, new ? new : target, target 1473 security/apparmor/domain.c aa_put_label(target); target 80 security/apparmor/file.c } else if (aad(sa)->fs.target) { target 82 security/apparmor/file.c audit_log_untrustedstring(ab, aad(sa)->fs.target); target 103 security/apparmor/file.c const char *target, struct aa_label *tlabel, target 112 security/apparmor/file.c aad(&sa)->fs.target = target; target 356 security/apparmor/file.c static inline bool xindex_is_subset(u32 link, u32 target) target 358 security/apparmor/file.c if (((link & ~AA_X_UNSAFE) != (target & ~AA_X_UNSAFE)) || target 359 security/apparmor/file.c ((link & AA_X_UNSAFE) && !(target & AA_X_UNSAFE))) target 367 security/apparmor/file.c const struct path *target, char *buffer2, target 383 security/apparmor/file.c error = path_name(OP_LINK, &profile->label, target, profile->path_flags, target 468 security/apparmor/file.c struct path target = { .mnt = new_dir->mnt, .dentry = old_dentry }; target 480 security/apparmor/file.c profile_path_link(profile, &link, buffer, &target, target 122 security/apparmor/include/audit.h const char *target; target 161 security/apparmor/include/file.h const char *target, struct aa_label *tlabel, kuid_t ouid, target 34 security/apparmor/include/ipc.h int aa_may_signal(struct aa_label *sender, struct aa_label *target, int sig); target 150 security/apparmor/include/perms.h int aa_profile_label_perm(struct aa_profile *profile, struct aa_profile *target, target 208 security/apparmor/ipc.c int aa_may_signal(struct aa_label *sender, struct aa_label *target, int sig) target 215 security/apparmor/ipc.c return xcheck_labels(sender, target, profile, target 216 security/apparmor/ipc.c profile_signal_perm(profile, target, MAY_WRITE, &sa), target 395 security/apparmor/lib.c int aa_profile_label_perm(struct aa_profile *profile, struct aa_profile *target, target 402 security/apparmor/lib.c aad(sa)->peer = &target->label; target 405 security/apparmor/lib.c aa_profile_match_label(profile, &target->label, type, request, &perms); target 137 security/apparmor/lsm.c static int apparmor_capget(struct task_struct *target, kernel_cap_t *effective, target 144 security/apparmor/lsm.c cred = __task_cred(target); target 729 security/apparmor/lsm.c static int apparmor_task_kill(struct task_struct *target, struct kernel_siginfo *info, target 740 security/apparmor/lsm.c tl = aa_get_task_label(target); target 748 security/apparmor/lsm.c tl = aa_get_task_label(target); target 662 security/apparmor/mount.c struct aa_label *target = NULL; target 671 security/apparmor/mount.c target = fn_label_build(label, profile, GFP_ATOMIC, target 674 security/apparmor/mount.c if (!target) { target 678 security/apparmor/mount.c } else if (!IS_ERR(target)) { target 679 security/apparmor/mount.c error = aa_replace_current_label(target); target 682 security/apparmor/mount.c aa_put_label(target); target 687 security/apparmor/mount.c error = PTR_ERR(target); target 199 security/commoncap.c int cap_capget(struct task_struct *target, kernel_cap_t *effective, target 206 security/commoncap.c cred = __task_cred(target); target 747 
security/commoncap.c #define __cap_gained(field, target, source) \ target 748 security/commoncap.c !cap_issubset(target->cap_##field, source->cap_##field) target 749 security/commoncap.c #define __cap_grew(target, source, cred) \ target 750 security/commoncap.c !cap_issubset(cred->cap_##target, cred->cap_##source) target 262 security/inode.c const char *target, target 268 security/inode.c if (target) { target 269 security/inode.c link = kstrdup(target, GFP_KERNEL); target 195 security/keys/internal.h extern struct key *request_key_auth_new(struct key *target, target 161 security/keys/request_key_auth.c struct key *request_key_auth_new(struct key *target, const char *op, target 171 security/keys/request_key_auth.c kenter("%d,", target->serial); target 210 security/keys/request_key_auth.c rka->target_key = key_get(target); target 214 security/keys/request_key_auth.c sprintf(desc, "%x", target->serial); target 706 security/security.c int security_capget(struct task_struct *target, target 711 security/security.c return call_int_hook(capget, 0, target, target 1796 security/security.c struct task_struct *target, long type, int mode) target 1798 security/security.c return call_int_hook(msg_queue_msgrcv, 0, msq, msg, target, type, mode); target 2147 security/selinux/hooks.c static int selinux_capget(struct task_struct *target, kernel_cap_t *effective, target 2151 security/selinux/hooks.c current_sid(), task_sid(target), SECCLASS_PROCESS, target 6062 security/selinux/hooks.c struct task_struct *target, target 6068 security/selinux/hooks.c u32 sid = task_sid(target); target 542 security/selinux/ss/services.c struct type_datum *target; target 551 security/selinux/ss/services.c target = policydb->type_val_to_struct[tcontext->type - 1]; target 552 security/selinux/ss/services.c BUG_ON(!target); target 559 security/selinux/ss/services.c if (target->bounds) { target 561 security/selinux/ss/services.c lo_tcontext.type = target->bounds; target 2191 security/selinux/ss/services.c convert_params.target = newsidtab; target 306 security/selinux/ss/sidtab.c dst_convert = sidtab_do_lookup(convert->target, count, 1); target 319 security/selinux/ss/sidtab.c convert->target->count = count + 1; target 426 security/selinux/ss/sidtab.c rc = sidtab_do_lookup(params->target, count - 1, 1) ? 
0 : -ENOMEM; target 433 security/selinux/ss/sidtab.c params->target->count = count; target 445 security/selinux/ss/sidtab.c rc = sidtab_convert_tree(¶ms->target->roots[level], target 66 security/selinux/ss/sidtab.h struct sidtab *target; target 3206 security/smack/smack_lsm.c struct task_struct *target, long type, int mode) target 993 security/tomoyo/common.h const char *target); target 803 security/tomoyo/file.c int tomoyo_path_perm(const u8 operation, const struct path *path, const char *target) target 831 security/tomoyo/file.c symlink_target.name = tomoyo_encode(target); target 47 security/yama/yama_lsm.c struct task_struct *target; target 57 security/yama/yama_lsm.c target_cmd = kstrdup_quotable_cmdline(info->target, GFP_KERNEL); target 62 security/yama/yama_lsm.c info->access, target_cmd, info->target->pid, agent_cmd, target 69 security/yama/yama_lsm.c put_task_struct(info->target); target 74 security/yama/yama_lsm.c static void report_access(const char *access, struct task_struct *target, target 80 security/yama/yama_lsm.c assert_spin_locked(&target->alloc_lock); /* for target->comm */ target 88 security/yama/yama_lsm.c access, target->comm, target->pid, target 97 security/yama/yama_lsm.c get_task_struct(target); target 100 security/yama/yama_lsm.c info->target = target; target 106 security/yama/yama_lsm.c put_task_struct(target); target 70 sound/firewire/bebob/bebob.h int (*get)(struct snd_bebob *bebob, u32 *target, unsigned int size); target 629 sound/firewire/bebob/bebob_maudio.c special_meter_get(struct snd_bebob *bebob, u32 *target, unsigned int size) target 651 sound/firewire/bebob/bebob_maudio.c target[i++] = be16_to_cpu(buf[c]) << 16; target 572 sound/pci/hda/hda_codec.c pin->target = val; target 589 sound/pci/hda/hda_codec.c return pin->target; target 2832 sound/pci/hda/hda_codec.c unsigned int target; target 2835 sound/pci/hda/hda_codec.c target = codec->power_filter(codec, nid, AC_PWRST_D0); target 2836 sound/pci/hda/hda_codec.c if (target == AC_PWRST_D0) target 2838 sound/pci/hda/hda_codec.c if (!snd_hda_check_power_state(codec, nid, target)) target 2840 sound/pci/hda/hda_codec.c AC_VERB_SET_POWER_STATE, target); target 664 sound/pci/hda/patch_ca0132.c unsigned int target[AE5_CA0113_OUT_SET_COMMANDS]; target 670 sound/pci/hda/patch_ca0132.c .target = { 0x2e, 0x30, 0x0d, 0x17, 0x19, 0x32 }, target 674 sound/pci/hda/patch_ca0132.c .target = { 0x2e, 0x30, 0x0d, 0x17, 0x19, 0x32 }, target 678 sound/pci/hda/patch_ca0132.c .target = { 0x2e, 0x30, 0x0d, 0x17, 0x19, 0x32 }, target 3255 sound/pci/hda/patch_ca0132.c unsigned int target, unsigned int value) target 3270 sound/pci/hda/patch_ca0132.c write_val = (target & 0xff); target 3294 sound/pci/hda/patch_ca0132.c unsigned int group, unsigned int target, unsigned int value) target 3309 sound/pci/hda/patch_ca0132.c write_val = (target & 0xff); target 4165 sound/pci/hda/patch_ca0132.c ae5_ca0113_output_presets[spec->cur_out_type].target[i], target 348 sound/pci/ice1712/se.c } target; target 358 sound/pci/ice1712/se.c .target = WM8776out, target 364 sound/pci/ice1712/se.c .target = WM8766, target 371 sound/pci/ice1712/se.c .target = WM8766, target 378 sound/pci/ice1712/se.c .target = WM8766, target 385 sound/pci/ice1712/se.c .target = WM8776in, target 390 sound/pci/ice1712/se.c .target = WM8776sel, target 396 sound/pci/ice1712/se.c .target = WM8776agc, target 402 sound/pci/ice1712/se.c .target = WM8776afl, target 478 sound/pci/ice1712/se.c switch (se200pci_cont[n].target) { target 2151 sound/soc/codecs/arizona.c unsigned int target, 
target 2205 sound/soc/codecs/arizona.c if (target % (ratio * Fref)) { target 2231 sound/soc/codecs/arizona.c if (target % (ratio * Fref)) { target 2258 sound/soc/codecs/arizona.c unsigned int target, div, gcd_fll; target 2270 sound/soc/codecs/arizona.c target = fll->fout * div / fll->vco_mult; target 2273 sound/soc/codecs/arizona.c arizona_fll_dbg(fll, "Fvco=%dHz\n", target); target 2276 sound/soc/codecs/arizona.c ratio = arizona_calc_fratio(fll, cfg, target, Fref, sync); target 2283 sound/soc/codecs/arizona.c cfg->n = target / (ratio * Fref); target 2285 sound/soc/codecs/arizona.c if (target % (ratio * Fref)) { target 2286 sound/soc/codecs/arizona.c gcd_fll = gcd(target, ratio * Fref); target 2289 sound/soc/codecs/arizona.c cfg->theta = (target - (cfg->n * ratio * Fref)) target 331 sound/soc/codecs/mt6358.c int target = 7; target 334 sound/soc/codecs/mt6358.c for (i = 0; i <= target; i++) { target 335 sound/soc/codecs/mt6358.c stage = up ? i : target - i; target 1129 sound/soc/codecs/rt5668.c int target, const int div[], int size) target 1133 sound/soc/codecs/rt5668.c if (rt5668->sysclk < target) { target 1141 sound/soc/codecs/rt5668.c if (target * div[i] == rt5668->sysclk) target 1143 sound/soc/codecs/rt5668.c if (target * div[i + 1] > rt5668->sysclk) { target 1150 sound/soc/codecs/rt5668.c if (target * div[i] < rt5668->sysclk) target 1155 sound/soc/codecs/rt5682.c int target, const int div[], int size) target 1159 sound/soc/codecs/rt5682.c if (rt5682->sysclk < target) { target 1167 sound/soc/codecs/rt5682.c if (target * div[i] == rt5682->sysclk) target 1169 sound/soc/codecs/rt5682.c if (target * div[i + 1] > rt5682->sysclk) { target 1176 sound/soc/codecs/rt5682.c if (target * div[i] < rt5682->sysclk) target 1860 sound/soc/codecs/wm2200.c unsigned int target; target 1894 sound/soc/codecs/wm2200.c target = Fout * div; target 1897 sound/soc/codecs/wm2200.c pr_debug("FLL Fvco=%dHz\n", target); target 1912 sound/soc/codecs/wm2200.c fll_div->n = target / (fratio * Fref); target 1914 sound/soc/codecs/wm2200.c if (target % Fref == 0) { target 1918 sound/soc/codecs/wm2200.c gcd_fll = gcd(target, fratio * Fref); target 1920 sound/soc/codecs/wm2200.c fll_div->theta = (target - (fll_div->n * fratio * Fref)) target 1680 sound/soc/codecs/wm5100.c unsigned int target; target 1714 sound/soc/codecs/wm5100.c target = Fout * div; target 1717 sound/soc/codecs/wm5100.c pr_debug("FLL Fvco=%dHz\n", target); target 1732 sound/soc/codecs/wm5100.c fll_div->n = target / (fratio * Fref); target 1734 sound/soc/codecs/wm5100.c if (target % Fref == 0) { target 1738 sound/soc/codecs/wm5100.c gcd_fll = gcd(target, fratio * Fref); target 1740 sound/soc/codecs/wm5100.c fll_div->theta = (target - (fll_div->n * fratio * Fref)) target 868 sound/soc/codecs/wm8400.c unsigned int K, Nmod, target; target 881 sound/soc/codecs/wm8400.c target = Fout * factors->outdiv; target 907 sound/soc/codecs/wm8400.c factors->n = target / (Fref * factors->fratio); target 908 sound/soc/codecs/wm8400.c Nmod = target % (Fref * factors->fratio); target 277 sound/soc/codecs/wm8510.c static void pll_factors(unsigned int target, unsigned int source) target 282 sound/soc/codecs/wm8510.c Ndiv = target / source; target 286 sound/soc/codecs/wm8510.c Ndiv = target / source; target 296 sound/soc/codecs/wm8510.c Nmod = target % source; target 400 sound/soc/codecs/wm8580.c static int pll_factors(struct _pll_div *pll_div, unsigned int target, target 407 sound/soc/codecs/wm8580.c pr_debug("wm8580: PLL %uHz->%uHz\n", source, target); target 413 
sound/soc/codecs/wm8580.c if (target * post_table[i].div >= 90000000 && target 414 sound/soc/codecs/wm8580.c target * post_table[i].div <= 100000000) { target 417 sound/soc/codecs/wm8580.c target *= post_table[i].div; target 424 sound/soc/codecs/wm8580.c "%u\n", target); target 428 sound/soc/codecs/wm8580.c Ndiv = target / source; target 433 sound/soc/codecs/wm8580.c Ndiv = target / source; target 444 sound/soc/codecs/wm8580.c Nmod = target % source; target 702 sound/soc/codecs/wm8753.c static void pll_factors(struct _pll_div *pll_div, unsigned int target, target 708 sound/soc/codecs/wm8753.c Ndiv = target / source; target 712 sound/soc/codecs/wm8753.c Ndiv = target / source; target 721 sound/soc/codecs/wm8753.c Nmod = target % source; target 344 sound/soc/codecs/wm8804.c static int pll_factors(struct pll_div *pll_div, unsigned int target, target 356 sound/soc/codecs/wm8804.c tmp = target * post_table[i].div; target 361 sound/soc/codecs/wm8804.c target *= post_table[i].div; target 368 sound/soc/codecs/wm8804.c __func__, target); target 373 sound/soc/codecs/wm8804.c Ndiv = target / source; target 377 sound/soc/codecs/wm8804.c Ndiv = target / source; target 387 sound/soc/codecs/wm8804.c Nmod = target % source; target 689 sound/soc/codecs/wm8900.c unsigned int K, Ndiv, Nmod, target; target 697 sound/soc/codecs/wm8900.c target = Fout; target 699 sound/soc/codecs/wm8900.c while (target < 90000000) { target 701 sound/soc/codecs/wm8900.c target *= 2; target 704 sound/soc/codecs/wm8900.c if (target > 100000000) target 706 sound/soc/codecs/wm8900.c " Fout=%u\n", target, Fref, Fout); target 710 sound/soc/codecs/wm8900.c div, Fref, Fout, target); target 721 sound/soc/codecs/wm8900.c Ndiv = target / Fref; target 729 sound/soc/codecs/wm8900.c Nmod = (target / fll_div->fll_ratio) % Fref; target 744 sound/soc/codecs/wm8900.c if (WARN_ON(target != Fout * (fll_div->fllclk_div << 2)) || target 745 sound/soc/codecs/wm8900.c WARN_ON(!K && target != Fref * fll_div->fll_ratio * fll_div->n)) target 1609 sound/soc/codecs/wm8904.c unsigned int K, Ndiv, Nmod, target; target 1642 sound/soc/codecs/wm8904.c target = Fout * div; target 1645 sound/soc/codecs/wm8904.c pr_debug("Fvco=%dHz\n", target); target 1651 sound/soc/codecs/wm8904.c target /= fll_fratios[i].ratio; target 1661 sound/soc/codecs/wm8904.c Ndiv = target / Fref; target 1664 sound/soc/codecs/wm8904.c Nmod = target % Fref; target 524 sound/soc/codecs/wm8940.c static void pll_factors(unsigned int target, unsigned int source) target 529 sound/soc/codecs/wm8940.c Ndiv = target / source; target 535 sound/soc/codecs/wm8940.c Ndiv = target / source; target 540 sound/soc/codecs/wm8940.c Ndiv = target / source; target 545 sound/soc/codecs/wm8940.c Ndiv = target / source; target 555 sound/soc/codecs/wm8940.c Nmod = target % source; target 147 sound/soc/codecs/wm8955.c unsigned int K, Ndiv, Nmod, target; target 155 sound/soc/codecs/wm8955.c target = Fout * 4; target 156 sound/soc/codecs/wm8955.c if (target < 90000000) { target 158 sound/soc/codecs/wm8955.c target *= 2; target 163 sound/soc/codecs/wm8955.c WARN_ON(target < 90000000 || target > 100000000); target 165 sound/soc/codecs/wm8955.c dev_dbg(dev, "Fvco=%dHz\n", target); target 168 sound/soc/codecs/wm8955.c Ndiv = target / Fref; target 171 sound/soc/codecs/wm8955.c Nmod = target % Fref; target 48 sound/soc/codecs/wm8960.c static bool is_pll_freq_available(unsigned int source, unsigned int target); target 1111 sound/soc/codecs/wm8960.c static bool is_pll_freq_available(unsigned int source, unsigned int target) 
target 1115 sound/soc/codecs/wm8960.c if (source == 0 || target == 0) target 1119 sound/soc/codecs/wm8960.c target *= 4; target 1120 sound/soc/codecs/wm8960.c Ndiv = target / source; target 1124 sound/soc/codecs/wm8960.c Ndiv = target / source; target 1137 sound/soc/codecs/wm8960.c static int pll_factors(unsigned int source, unsigned int target, target 1143 sound/soc/codecs/wm8960.c pr_debug("WM8960 PLL: setting %dHz->%dHz\n", source, target); target 1146 sound/soc/codecs/wm8960.c target *= 4; target 1148 sound/soc/codecs/wm8960.c Ndiv = target / source; target 1152 sound/soc/codecs/wm8960.c Ndiv = target / source; target 1162 sound/soc/codecs/wm8960.c Nmod = target % source; target 507 sound/soc/codecs/wm8961.c int i, best, target, fs; target 532 sound/soc/codecs/wm8961.c target = wm8961->sysclk / fs; target 534 sound/soc/codecs/wm8961.c if (substream->stream == SNDRV_PCM_STREAM_PLAYBACK && target < 64) { target 539 sound/soc/codecs/wm8961.c if (substream->stream == SNDRV_PCM_STREAM_CAPTURE && target < 256) { target 546 sound/soc/codecs/wm8961.c if (wm8961_clk_sys_ratio[i].ratio >= target) target 2735 sound/soc/codecs/wm8962.c unsigned int target; target 2769 sound/soc/codecs/wm8962.c target = Fout * div; target 2772 sound/soc/codecs/wm8962.c pr_debug("FLL Fvco=%dHz\n", target); target 2787 sound/soc/codecs/wm8962.c fll_div->n = target / (fratio * Fref); target 2789 sound/soc/codecs/wm8962.c if (target % Fref == 0) { target 2793 sound/soc/codecs/wm8962.c gcd_fll = gcd(target, fratio * Fref); target 2795 sound/soc/codecs/wm8962.c fll_div->theta = (target - (fll_div->n * fratio * Fref)) target 281 sound/soc/codecs/wm8974.c unsigned int target, unsigned int source) target 287 sound/soc/codecs/wm8974.c target *= 4; target 289 sound/soc/codecs/wm8974.c Ndiv = target / source; target 293 sound/soc/codecs/wm8974.c Ndiv = target / source; target 303 sound/soc/codecs/wm8974.c Nmod = target % source; target 409 sound/soc/codecs/wm8978.c struct wm8978_pll_div *pll_div, unsigned int target, unsigned int source) target 414 sound/soc/codecs/wm8978.c n_div = target / source; target 418 sound/soc/codecs/wm8978.c n_div = target / source; target 429 sound/soc/codecs/wm8978.c n_mod = target - source * n_div; target 750 sound/soc/codecs/wm8983.c static int pll_factors(struct pll_div *pll_div, unsigned int target, target 757 sound/soc/codecs/wm8983.c Ndiv = target / source; target 761 sound/soc/codecs/wm8983.c Ndiv = target / source; target 771 sound/soc/codecs/wm8983.c Nmod = target % source; target 847 sound/soc/codecs/wm8985.c static int pll_factors(struct pll_div *pll_div, unsigned int target, target 854 sound/soc/codecs/wm8985.c Ndiv = target / source; target 858 sound/soc/codecs/wm8985.c Ndiv = target / source; target 868 sound/soc/codecs/wm8985.c Nmod = target % source; target 893 sound/soc/codecs/wm8990.c static void pll_factors(struct _pll_div *pll_div, unsigned int target, target 900 sound/soc/codecs/wm8990.c Ndiv = target / source; target 904 sound/soc/codecs/wm8990.c Ndiv = target / source; target 913 sound/soc/codecs/wm8990.c Nmod = target % source; target 885 sound/soc/codecs/wm8991.c static void pll_factors(struct _pll_div *pll_div, unsigned int target, target 892 sound/soc/codecs/wm8991.c Ndiv = target / source; target 896 sound/soc/codecs/wm8991.c Ndiv = target / source; target 905 sound/soc/codecs/wm8991.c Nmod = target % source; target 386 sound/soc/codecs/wm8993.c unsigned int K, Ndiv, Nmod, target; target 411 sound/soc/codecs/wm8993.c target = Fout * 2; target 412 
sound/soc/codecs/wm8993.c while (target < 90000000) { target 414 sound/soc/codecs/wm8993.c target *= 2; target 423 sound/soc/codecs/wm8993.c pr_debug("Fvco=%dHz\n", target); target 429 sound/soc/codecs/wm8993.c target /= fll_fratios[i].ratio; target 439 sound/soc/codecs/wm8993.c Ndiv = target / Fref; target 442 sound/soc/codecs/wm8993.c Nmod = target % Fref; target 1917 sound/soc/codecs/wm8996.c unsigned int target; target 1961 sound/soc/codecs/wm8996.c target = Fout * div; target 1964 sound/soc/codecs/wm8996.c pr_debug("FLL Fvco=%dHz\n", target); target 1979 sound/soc/codecs/wm8996.c fll_div->n = target / (fratio * Fref); target 1981 sound/soc/codecs/wm8996.c if (target % Fref == 0) { target 1985 sound/soc/codecs/wm8996.c gcd_fll = gcd(target, fratio * Fref); target 1987 sound/soc/codecs/wm8996.c fll_div->theta = (target - (fll_div->n * fratio * Fref)) target 466 sound/soc/codecs/wm9081.c unsigned int K, Ndiv, Nmod, target; target 490 sound/soc/codecs/wm9081.c target = Fout * 2; target 491 sound/soc/codecs/wm9081.c while (target < 90000000) { target 493 sound/soc/codecs/wm9081.c target *= 2; target 502 sound/soc/codecs/wm9081.c pr_debug("Fvco=%dHz\n", target); target 508 sound/soc/codecs/wm9081.c target /= fll_fratios[i].ratio; target 518 sound/soc/codecs/wm9081.c Ndiv = target / Fref; target 521 sound/soc/codecs/wm9081.c Nmod = target % Fref; target 639 sound/soc/codecs/wm9081.c int new_sysclk, i, target; target 669 sound/soc/codecs/wm9081.c target = wm9081->fs * clk_sys_rates[i].ratio; target 670 sound/soc/codecs/wm9081.c new_sysclk = target; target 671 sound/soc/codecs/wm9081.c if (target >= wm9081->bclk && target 672 sound/soc/codecs/wm9081.c target > 3000000) target 756 sound/soc/codecs/wm9713.c unsigned int K, Ndiv, Nmod, target; target 759 sound/soc/codecs/wm9713.c target = 98304000; target 782 sound/soc/codecs/wm9713.c target >>= 2; target 786 sound/soc/codecs/wm9713.c Ndiv = target / source; target 793 sound/soc/codecs/wm9713.c Nmod = target % source; target 4119 sound/soc/codecs/wm_adsp.c static int wm_adsp_buffer_capture_block(struct wm_adsp_compr *compr, int target) target 4141 sound/soc/codecs/wm_adsp.c if (nwords > target) target 4142 sound/soc/codecs/wm_adsp.c nwords = target; target 246 sound/soc/intel/common/sst-dsp.c u32 target, u32 time, char *operation) target 263 sound/soc/intel/common/sst-dsp.c while ((((reg = sst_dsp_shim_read_unlocked(ctx, offset)) & mask) != target) target 272 sound/soc/intel/common/sst-dsp.c if ((reg & mask) == target) { target 951 sound/soc/sh/fsi.c unsigned long target = 0; /* 12288000 or 11289600 */ target 960 sound/soc/sh/fsi.c target = 12288000; target 962 sound/soc/sh/fsi.c target = 11289600; target 963 sound/soc/sh/fsi.c if (!target) { target 969 sound/soc/sh/fsi.c ackmd = target / rate; target 996 sound/soc/sh/fsi.c cout = target * adj; target 1005 sound/soc/sh/fsi.c diff = abs(actual - target); target 391 sound/soc/sh/rcar/core.c struct rsnd_mod *target; target 418 sound/soc/sh/rcar/core.c target = src ? src : ssiu; target 422 sound/soc/sh/rcar/core.c target = cmd ? 
cmd : ssiu; target 430 sound/soc/sh/rcar/core.c (mod != target)) target 463 sound/soc/sof/ops.h u32 mask, u32 target, u32 timeout_ms, target 105 tools/arch/arm/include/uapi/asm/kvm.h __u32 target; target 111 tools/arch/arm64/include/uapi/asm/kvm.h __u32 target; target 8 tools/arch/x86/include/asm/mcsafe_test.h .macro MCSAFE_TEST_SRC reg count target target 11 tools/arch/x86/include/asm/mcsafe_test.h .macro MCSAFE_TEST_DST reg count target target 81 tools/bpf/bpftool/common.c mnt_fs(const char *target, const char *type, char *buff, size_t bufflen) target 85 tools/bpf/bpftool/common.c while (mount("", target, "none", MS_PRIVATE | MS_REC, NULL)) { target 89 tools/bpf/bpftool/common.c target, strerror(errno)); target 93 tools/bpf/bpftool/common.c if (mount(target, target, "none", MS_BIND, NULL)) { target 96 tools/bpf/bpftool/common.c target, target, strerror(errno)); target 103 tools/bpf/bpftool/common.c if (mount(type, target, type, 0, "mode=0700")) { target 105 tools/bpf/bpftool/common.c type, type, target, strerror(errno)); target 112 tools/bpf/bpftool/common.c int mount_tracefs(const char *target) target 117 tools/bpf/bpftool/common.c err = mnt_fs(target, "tracefs", err_str, ERR_MAX_LEN); target 577 tools/bpf/bpftool/feature.c enum probe_component target = COMPONENT_UNSPEC; target 596 tools/bpf/bpftool/feature.c if (target != COMPONENT_UNSPEC) { target 600 tools/bpf/bpftool/feature.c target = COMPONENT_KERNEL; target 605 tools/bpf/bpftool/feature.c if (target != COMPONENT_UNSPEC || ifindex) { target 612 tools/bpf/bpftool/feature.c target = COMPONENT_DEVICE; target 649 tools/bpf/bpftool/feature.c switch (target) { target 111 tools/bpf/bpftool/main.h int mount_tracefs(const char *target); target 24 tools/build/fixdep.c char *target; target 39 tools/build/fixdep.c printf("cmd_%s := %s\n\n", target, cmdline); target 97 tools/build/fixdep.c target, s); target 99 tools/build/fixdep.c target); target 117 tools/build/fixdep.c printf("\n%s: $(deps_%s)\n\n", target, target); target 118 tools/build/fixdep.c printf("$(deps_%s):\n", target); target 163 tools/build/fixdep.c target = argv[2]; target 971 tools/lib/subcmd/parse-options.c int *target = opt->value; target 975 tools/lib/subcmd/parse-options.c *target = 0; target 977 tools/lib/subcmd/parse-options.c if (*target >= 0) target 978 tools/lib/subcmd/parse-options.c (*target)++; target 980 tools/lib/subcmd/parse-options.c *target = 1; target 982 tools/lib/subcmd/parse-options.c if (*target <= 0) target 983 tools/lib/subcmd/parse-options.c (*target)--; target 985 tools/lib/subcmd/parse-options.c *target = -1; target 16 tools/perf/arch/arm64/annotate/instructions.c char *s = strchr(ops->raw, ','), *target, *endptr; target 28 tools/perf/arch/arm64/annotate/instructions.c target = ++s; target 29 tools/perf/arch/arm64/annotate/instructions.c ops->target.raw = strdup(target); target 30 tools/perf/arch/arm64/annotate/instructions.c if (ops->target.raw == NULL) target 33 tools/perf/arch/arm64/annotate/instructions.c ops->target.addr = strtoull(target, &endptr, 16); target 34 tools/perf/arch/arm64/annotate/instructions.c if (endptr == target) target 46 tools/perf/arch/arm64/annotate/instructions.c ops->target.name = strdup(s); target 49 tools/perf/arch/arm64/annotate/instructions.c if (ops->target.name == NULL) target 55 tools/perf/arch/arm64/annotate/instructions.c zfree(&ops->target.raw); target 137 tools/perf/arch/powerpc/util/sym-handling.c map = get_target_map(pev->target, pev->nsi, pev->uprobes); target 9 tools/perf/arch/s390/annotate/instructions.c 
struct addr_map_symbol target = { target 17 tools/perf/arch/s390/annotate/instructions.c ops->target.addr = strtoull(tok + 1, &endptr, 16); target 34 tools/perf/arch/s390/annotate/instructions.c ops->target.name = strdup(name); target 37 tools/perf/arch/s390/annotate/instructions.c if (ops->target.name == NULL) target 39 tools/perf/arch/s390/annotate/instructions.c target.addr = map__objdump_2mem(map, ops->target.addr); target 41 tools/perf/arch/s390/annotate/instructions.c if (map_groups__find_ams(&target) == 0 && target 42 tools/perf/arch/s390/annotate/instructions.c map__rip_2objdump(target.map, map->map_ip(target.map, target.addr)) == ops->target.addr) target 43 tools/perf/arch/s390/annotate/instructions.c ops->target.sym = target.sym; target 60 tools/perf/arch/s390/annotate/instructions.c char *s = strchr(ops->raw, ','), *target, *endptr; target 72 tools/perf/arch/s390/annotate/instructions.c target = ++s; target 73 tools/perf/arch/s390/annotate/instructions.c ops->target.raw = strdup(target); target 74 tools/perf/arch/s390/annotate/instructions.c if (ops->target.raw == NULL) target 77 tools/perf/arch/s390/annotate/instructions.c ops->target.addr = strtoull(target, &endptr, 16); target 78 tools/perf/arch/s390/annotate/instructions.c if (endptr == target) target 89 tools/perf/arch/s390/annotate/instructions.c ops->target.name = strdup(s + 1); target 91 tools/perf/arch/s390/annotate/instructions.c if (ops->target.name == NULL) target 97 tools/perf/arch/s390/annotate/instructions.c zfree(&ops->target.raw); target 53 tools/perf/arch/x86/tests/perf-time-to-tsc.c .target = { target 167 tools/perf/arch/x86/util/intel-pt.c static int intel_pt_pick_bit(int bits, int target) target 173 tools/perf/arch/x86/util/intel-pt.c if (pos <= target || pick < 0) target 175 tools/perf/arch/x86/util/intel-pt.c if (pos >= target) target 710 tools/perf/arch/x86/util/intel-pt.c bool cpu_wide = !target__none(&opts->target) && target 711 tools/perf/arch/x86/util/intel-pt.c !target__has_task(&opts->target); target 803 tools/perf/arch/x86/util/intel-pt.c !target__none(&opts->target)) target 34 tools/perf/builtin-ftrace.c struct target target; target 159 tools/perf/builtin-ftrace.c if (target__has_cpu(&ftrace->target)) target 200 tools/perf/builtin-ftrace.c if (!target__has_cpu(&ftrace->target)) target 313 tools/perf/builtin-ftrace.c &ftrace->target, argv, false, target 453 tools/perf/builtin-ftrace.c .target = { .uid = UINT_MAX, }, target 463 tools/perf/builtin-ftrace.c OPT_STRING('p', "pid", &ftrace.target.pid, "pid", target 467 tools/perf/builtin-ftrace.c OPT_BOOLEAN('a', "all-cpus", &ftrace.target.system_wide, target 469 tools/perf/builtin-ftrace.c OPT_STRING('C', "cpu", &ftrace.target.cpu_list, "cpu", target 495 tools/perf/builtin-ftrace.c if (!argc && target__none(&ftrace.target)) target 498 tools/perf/builtin-ftrace.c ret = target__validate(&ftrace.target); target 502 tools/perf/builtin-ftrace.c target__strerror(&ftrace.target, ret, errbuf, 512); target 513 tools/perf/builtin-ftrace.c ret = perf_evlist__create_maps(ftrace.evlist, &ftrace.target); target 577 tools/perf/builtin-kvm.c if (kvm->opts.target.system_wide) target 579 tools/perf/builtin-kvm.c else if (kvm->opts.target.pid) target 580 tools/perf/builtin-kvm.c pr_info("pid(s) %s, ", kvm->opts.target.pid); target 1126 tools/perf/builtin-kvm.c if (kvm->opts.target.pid) { target 1127 tools/perf/builtin-kvm.c kvm->pid_list = intlist__new(kvm->opts.target.pid); target 1266 tools/perf/builtin-kvm.c OPT_STRING('p', "pid", &kvm->opts.target.pid, "pid", target 
1286 tools/perf/builtin-kvm.c if (!kvm->opts.target.pid) target 1287 tools/perf/builtin-kvm.c kvm->opts.target.system_wide = true; target 1349 tools/perf/builtin-kvm.c OPT_STRING('p', "pid", &kvm->opts.target.pid, "pid", target 1356 tools/perf/builtin-kvm.c OPT_BOOLEAN('a', "all-cpus", &kvm->opts.target.system_wide, target 1399 tools/perf/builtin-kvm.c kvm->opts.target.uses_mmap = false; target 1400 tools/perf/builtin-kvm.c kvm->opts.target.uid_str = NULL; target 1401 tools/perf/builtin-kvm.c kvm->opts.target.uid = UINT_MAX; target 1420 tools/perf/builtin-kvm.c err = target__validate(&kvm->opts.target); target 1422 tools/perf/builtin-kvm.c target__strerror(&kvm->opts.target, err, errbuf, BUFSIZ); target 1426 tools/perf/builtin-kvm.c if (target__none(&kvm->opts.target)) target 1427 tools/perf/builtin-kvm.c kvm->opts.target.system_wide = true; target 1445 tools/perf/builtin-kvm.c if (perf_evlist__create_maps(kvm->evlist, &kvm->opts.target) < 0) target 1459 tools/perf/builtin-kvm.c machine__synthesize_threads(&kvm->session->machines.host, &kvm->opts.target, target 47 tools/perf/builtin-probe.c char *target; target 65 tools/perf/builtin-probe.c if (params.target) { target 66 tools/perf/builtin-probe.c pev->target = strdup(params.target); target 67 tools/perf/builtin-probe.c if (!pev->target) target 115 tools/perf/builtin-probe.c if (!params.target && ptr && *ptr == '/') { target 116 tools/perf/builtin-probe.c params.target = strdup(ptr); target 117 tools/perf/builtin-probe.c if (!params.target) target 193 tools/perf/builtin-probe.c free(params.target); target 194 tools/perf/builtin-probe.c params.target = tmp; target 319 tools/perf/builtin-probe.c free(params.target); target 659 tools/perf/builtin-probe.c ret = show_available_funcs(params.target, params.nsi, target 666 tools/perf/builtin-probe.c ret = show_line_range(¶ms.line_range, params.target, target 693 tools/perf/builtin-probe.c if (params.target && !params.target_used) { target 783 tools/perf/builtin-record.c perf_evsel__open_strerror(pos, &opts->target, target 1121 tools/perf/builtin-record.c if (target__none(&rec->opts.target)) target 1172 tools/perf/builtin-record.c if (target__none(&rec->opts.target)) target 1328 tools/perf/builtin-record.c err = __machine__synthesize_threads(machine, tool, &opts->target, rec->evlist->core.threads, target 1392 tools/perf/builtin-record.c err = perf_evlist__prepare_workload(rec->evlist, &opts->target, target 1460 tools/perf/builtin-record.c if (perf_evlist__start_sb_thread(sb_evlist, &rec->opts.target)) { target 1485 tools/perf/builtin-record.c if (!target__none(&opts->target) && !opts->initial_delay) target 1636 tools/perf/builtin-record.c if (done && !disabled && !target__none(&opts->target)) { target 1660 tools/perf/builtin-record.c if (target__none(&rec->opts.target)) target 2078 tools/perf/builtin-record.c .target = { target 2117 tools/perf/builtin-record.c OPT_STRING('p', "pid", &record.opts.target.pid, "pid", target 2119 tools/perf/builtin-record.c OPT_STRING('t', "tid", &record.opts.target.tid, "tid", target 2127 tools/perf/builtin-record.c OPT_BOOLEAN('a', "all-cpus", &record.opts.target.system_wide, target 2129 tools/perf/builtin-record.c OPT_STRING('C', "cpu", &record.opts.target.cpu_list, "cpu", target 2187 tools/perf/builtin-record.c OPT_STRING('u', "uid", &record.opts.target.uid_str, "user", target 2201 tools/perf/builtin-record.c OPT_BOOLEAN(0, "per-thread", &record.opts.target.per_thread, target 2316 tools/perf/builtin-record.c if (!argc && target__none(&rec->opts.target)) target 2317 
tools/perf/builtin-record.c rec->opts.target.system_wide = true; target 2319 tools/perf/builtin-record.c if (nr_cgroups && !rec->opts.target.system_wide) { target 2419 tools/perf/builtin-record.c if (rec->opts.target.tid && !rec->opts.no_inherit_set) target 2422 tools/perf/builtin-record.c err = target__validate(&rec->opts.target); target 2424 tools/perf/builtin-record.c target__strerror(&rec->opts.target, err, errbuf, BUFSIZ); target 2428 tools/perf/builtin-record.c err = target__parse_uid(&rec->opts.target); target 2432 tools/perf/builtin-record.c target__strerror(&rec->opts.target, err, errbuf, BUFSIZ); target 2440 tools/perf/builtin-record.c rec->opts.ignore_missing_thread = rec->opts.target.uid != UINT_MAX || rec->opts.target.pid; target 2443 tools/perf/builtin-record.c if (perf_evlist__create_maps(rec->evlist, &rec->opts.target) < 0) target 139 tools/perf/builtin-stat.c static struct target target = { target 273 tools/perf/builtin-stat.c if (target__has_cpu(&target) && !target__has_per_thread(&target)) target 368 tools/perf/builtin-stat.c if (!target__none(&target) || stat_config.initial_delay) target 379 tools/perf/builtin-stat.c if (!target__none(&target)) target 401 tools/perf/builtin-stat.c static bool is_target_alive(struct target *_target, target 449 tools/perf/builtin-stat.c if (perf_evlist__prepare_workload(evsel_list, &target, argv, is_pipe, target 462 tools/perf/builtin-stat.c if (create_perf_stat_counter(counter, &stat_config, &target) < 0) { target 491 tools/perf/builtin-stat.c } else if (target__has_per_thread(&target) && target 505 tools/perf/builtin-stat.c perf_evsel__open_strerror(counter, &target, target 586 tools/perf/builtin-stat.c if (!is_target_alive(&target, evsel_list->core.threads)) target 657 tools/perf/builtin-stat.c perf_evlist__print_counters(evsel_list, &stat_config, &target, target 736 tools/perf/builtin-stat.c OPT_STRING('p', "pid", &target.pid, "pid", target 738 tools/perf/builtin-stat.c OPT_STRING('t', "tid", &target.tid, "tid", target 740 tools/perf/builtin-stat.c OPT_BOOLEAN('a', "all-cpus", &target.system_wide, target 761 tools/perf/builtin-stat.c OPT_STRING('C', "cpu", &target.cpu_list, "cpu", target 1330 tools/perf/builtin-stat.c if (nr_cgroups || !target__has_cpu(&target)) { target 1362 tools/perf/builtin-stat.c if (target__has_cpu(&target)) target 1665 tools/perf/builtin-stat.c if (!target__none(&target)) target 1669 tools/perf/builtin-stat.c target.system_wide = true; target 1679 tools/perf/builtin-stat.c target.system_wide = true; target 1810 tools/perf/builtin-stat.c if ((stat_config.run_count == 1) && target__none(&target)) target 1831 tools/perf/builtin-stat.c !target__has_task(&target)) { target 1832 tools/perf/builtin-stat.c if (!target.system_wide || target.cpu_list) { target 1848 tools/perf/builtin-stat.c !target__has_cpu(&target)) { target 1861 tools/perf/builtin-stat.c target__validate(&target); target 1863 tools/perf/builtin-stat.c if ((stat_config.aggr_mode == AGGR_THREAD) && (target.system_wide)) target 1864 tools/perf/builtin-stat.c target.per_thread = true; target 1866 tools/perf/builtin-stat.c if (perf_evlist__create_maps(evsel_list, &target) < 0) { target 1867 tools/perf/builtin-stat.c if (target__has_task(&target)) { target 1871 tools/perf/builtin-stat.c } else if (target__has_cpu(&target)) { target 1885 tools/perf/builtin-stat.c if (target.system_wide) { target 352 tools/perf/builtin-top.c static void prompt_integer(int *target, const char *msg) target 373 tools/perf/builtin-top.c *target = tmp; target 378 
tools/perf/builtin-top.c static void prompt_percent(int *target, const char *msg) target 384 tools/perf/builtin-top.c *target = tmp; target 637 tools/perf/builtin-top.c hists->uid_filter_str = top->record_opts.target.uid_str; target 1040 tools/perf/builtin-top.c perf_evsel__open_strerror(counter, &opts->target, target 1252 tools/perf/builtin-top.c machine__synthesize_threads(&top->session->machines.host, &opts->target, target 1285 tools/perf/builtin-top.c if (!target__none(&opts->target)) target 1403 tools/perf/builtin-top.c .target = { target 1422 tools/perf/builtin-top.c struct target *target = &opts->target; target 1428 tools/perf/builtin-top.c OPT_STRING('p', "pid", &target->pid, "pid", target 1430 tools/perf/builtin-top.c OPT_STRING('t', "tid", &target->tid, "tid", target 1432 tools/perf/builtin-top.c OPT_BOOLEAN('a', "all-cpus", &target->system_wide, target 1434 tools/perf/builtin-top.c OPT_STRING('C', "cpu", &target->cpu_list, "cpu", target 1513 tools/perf/builtin-top.c OPT_STRING('u', "uid", &target->uid_str, "user", "user to profile"), target 1614 tools/perf/builtin-top.c status = target__validate(target); target 1616 tools/perf/builtin-top.c target__strerror(target, status, errbuf, BUFSIZ); target 1620 tools/perf/builtin-top.c status = target__parse_uid(target); target 1624 tools/perf/builtin-top.c target__strerror(target, status, errbuf, BUFSIZ); target 1631 tools/perf/builtin-top.c if (target__none(target)) target 1632 tools/perf/builtin-top.c target->system_wide = true; target 1634 tools/perf/builtin-top.c if (perf_evlist__create_maps(top.evlist, target) < 0) { target 1686 tools/perf/builtin-top.c if (perf_evlist__start_sb_thread(sb_evlist, target)) { target 1418 tools/perf/builtin-trace.c err = __machine__synthesize_threads(trace->host, &trace->tool, &trace->opts.target, target 3334 tools/perf/builtin-trace.c err = perf_evlist__create_maps(evlist, &trace->opts.target); target 3352 tools/perf/builtin-trace.c err = perf_evlist__prepare_workload(evlist, &trace->opts.target, target 3419 tools/perf/builtin-trace.c if (!target__none(&trace->opts.target) && !trace->opts.initial_delay) target 3594 tools/perf/builtin-trace.c if (trace->opts.target.pid) target 3595 tools/perf/builtin-trace.c symbol_conf.pid_list_str = strdup(trace->opts.target.pid); target 3597 tools/perf/builtin-trace.c if (trace->opts.target.tid) target 3598 tools/perf/builtin-trace.c symbol_conf.tid_list_str = strdup(trace->opts.target.tid); target 4080 tools/perf/builtin-trace.c .target = { target 4113 tools/perf/builtin-trace.c OPT_STRING('p', "pid", &trace.opts.target.pid, "pid", target 4115 tools/perf/builtin-trace.c OPT_STRING('t', "tid", &trace.opts.target.tid, "tid", target 4119 tools/perf/builtin-trace.c OPT_BOOLEAN('a', "all-cpus", &trace.opts.target.system_wide, target 4121 tools/perf/builtin-trace.c OPT_STRING('C', "cpu", &trace.opts.target.cpu_list, "cpu", target 4128 tools/perf/builtin-trace.c OPT_STRING('u', "uid", &trace.opts.target.uid_str, "user", target 4213 tools/perf/builtin-trace.c if ((nr_cgroups || trace.cgroup) && !trace.opts.target.system_wide) { target 4403 tools/perf/builtin-trace.c err = target__validate(&trace.opts.target); target 4405 tools/perf/builtin-trace.c target__strerror(&trace.opts.target, err, bf, sizeof(bf)); target 4410 tools/perf/builtin-trace.c err = target__parse_uid(&trace.opts.target); target 4412 tools/perf/builtin-trace.c target__strerror(&trace.opts.target, err, bf, sizeof(bf)); target 4417 tools/perf/builtin-trace.c if (!argc && target__none(&trace.opts.target)) 
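Editorial note on the builtin-record/stat/top/trace hits above: they share one idiom, validate the requested target and, when target__none() reports that neither a task nor a CPU was asked for (and no workload was given), fall back to system-wide profiling. Below is a cut-down, self-contained illustration of that decision; the struct is a simplified stand-in for perf's real struct target, not the actual header.

#include <stdbool.h>
#include <stdio.h>

/*
 * Simplified stand-in for perf's struct target
 * (tools/perf/util/target.h); field names follow the entries above,
 * everything else is trimmed for illustration.
 */
struct target {
	const char *pid;
	const char *tid;
	const char *cpu_list;
	const char *uid_str;
	bool system_wide;
};

static bool target__has_task(const struct target *t)
{
	return t->tid || t->pid || t->uid_str;
}

static bool target__has_cpu(const struct target *t)
{
	return t->system_wide || t->cpu_list;
}

static bool target__none(const struct target *t)
{
	return !target__has_task(t) && !target__has_cpu(t);
}

int main(void)
{
	struct target t = { 0 };

	/*
	 * Same shape as the builtin-* call sites: no pid/tid/uid and no
	 * CPU list requested, so default to profiling system-wide.
	 */
	if (target__none(&t))
		t.system_wide = true;

	printf("system_wide=%d\n", t.system_wide);
	return 0;
}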
target 4418 tools/perf/builtin-trace.c trace.opts.target.system_wide = true; target 92 tools/perf/tests/backward-ring-buffer.c .target = { target 103 tools/perf/tests/backward-ring-buffer.c opts.target.tid = opts.target.pid = pid; target 111 tools/perf/tests/backward-ring-buffer.c err = perf_evlist__create_maps(evlist, &opts.target); target 114 tools/perf/tests/bpf.c .target = { target 144 tools/perf/tests/bpf.c opts.target.tid = opts.target.pid = pid; target 153 tools/perf/tests/bpf.c err = perf_evlist__create_maps(evlist, &opts.target); target 560 tools/perf/tests/code-reading.c .target = { target 20 tools/perf/tests/event-times.c struct target target = { target 29 tools/perf/tests/event-times.c err = perf_evlist__create_maps(evlist, &target); target 35 tools/perf/tests/event-times.c err = perf_evlist__prepare_workload(evlist, &target, argv, false, NULL); target 69 tools/perf/tests/keep-tracking.c .target = { target 27 tools/perf/tests/openat-syscall-tp-fields.c .target = { target 56 tools/perf/tests/openat-syscall-tp-fields.c err = perf_evlist__create_maps(evlist, &opts.target); target 46 tools/perf/tests/perf-record.c .target = { target 86 tools/perf/tests/perf-record.c err = perf_evlist__create_maps(evlist, &opts.target); target 98 tools/perf/tests/perf-record.c err = perf_evlist__prepare_workload(evlist, &opts.target, argv, false, NULL); target 59 tools/perf/tests/sdt.c static int search_cached_probe(const char *target, target 62 tools/perf/tests/sdt.c struct probe_cache *cache = probe_cache__new(target, NULL); target 66 tools/perf/tests/sdt.c pr_debug("Failed to open probe cache of %s\n", target); target 331 tools/perf/tests/switch-tracking.c .target = { target 47 tools/perf/tests/task-exit.c struct target target = { target 85 tools/perf/tests/task-exit.c err = perf_evlist__prepare_workload(evlist, &target, argv, false, target 151 tools/perf/ui/browsers/annotate.c struct annotation_line *target; target 186 tools/perf/ui/browsers/annotate.c target = notes->offsets[cursor->ops.target.offset]; target 187 tools/perf/ui/browsers/annotate.c if (target == NULL) { target 189 tools/perf/ui/browsers/annotate.c cursor->ops.target.offset); target 195 tools/perf/ui/browsers/annotate.c to = target->idx_asm; target 198 tools/perf/ui/browsers/annotate.c to = (u64)target->idx; target 418 tools/perf/ui/browsers/annotate.c if (!dl->ops.target.sym) { target 423 tools/perf/ui/browsers/annotate.c notes = symbol__annotation(dl->ops.target.sym); target 426 tools/perf/ui/browsers/annotate.c if (!symbol__hists(dl->ops.target.sym, evsel->evlist->core.nr_entries)) { target 429 tools/perf/ui/browsers/annotate.c dl->ops.target.sym->name); target 434 tools/perf/ui/browsers/annotate.c symbol__tui_annotate(dl->ops.target.sym, ms->map, evsel, hbt, browser->opts); target 469 tools/perf/ui/browsers/annotate.c if (dl->ops.target.outside) { target 474 tools/perf/ui/browsers/annotate.c offset = dl->ops.target.offset; target 50 tools/perf/ui/browsers/map.c char target[512]; target 54 tools/perf/ui/browsers/map.c target, "ENTER: OK, ESC: Cancel", 0); target 58 tools/perf/ui/browsers/map.c if (target[0] == '0' && tolower(target[1]) == 'x') { target 59 tools/perf/ui/browsers/map.c u64 addr = strtoull(target, NULL, 16); target 62 tools/perf/ui/browsers/map.c sym = map__find_symbol_by_name(browser->map, target); target 70 tools/perf/ui/browsers/map.c ui_helpline__fpush("%s not found!", target); target 213 tools/perf/util/annotate.c zfree(&ops->target.raw); target 214 tools/perf/util/annotate.c zfree(&ops->target.name); target 
244 tools/perf/util/annotate.c struct addr_map_symbol target = { target 248 tools/perf/util/annotate.c ops->target.addr = strtoull(ops->raw, &endptr, 16); target 265 tools/perf/util/annotate.c ops->target.name = strdup(name); target 268 tools/perf/util/annotate.c if (ops->target.name == NULL) target 271 tools/perf/util/annotate.c target.addr = map__objdump_2mem(map, ops->target.addr); target 273 tools/perf/util/annotate.c if (map_groups__find_ams(&target) == 0 && target 274 tools/perf/util/annotate.c map__rip_2objdump(target.map, map->map_ip(target.map, target.addr)) == ops->target.addr) target 275 tools/perf/util/annotate.c ops->target.sym = target.sym; target 287 tools/perf/util/annotate.c ops->target.addr = strtoull(endptr, NULL, 16); target 295 tools/perf/util/annotate.c if (ops->target.sym) target 296 tools/perf/util/annotate.c return scnprintf(bf, size, "%-*s %s", max_ins_name, ins->name, ops->target.sym->name); target 298 tools/perf/util/annotate.c if (ops->target.addr == 0) target 301 tools/perf/util/annotate.c if (ops->target.name) target 302 tools/perf/util/annotate.c return scnprintf(bf, size, "%-*s %s", max_ins_name, ins->name, ops->target.name); target 304 tools/perf/util/annotate.c return scnprintf(bf, size, "%-*s *%" PRIx64, max_ins_name, ins->name, ops->target.addr); target 333 tools/perf/util/annotate.c struct addr_map_symbol target = { target 358 tools/perf/util/annotate.c ops->target.addr = strtoull(c, NULL, 16); target 359 tools/perf/util/annotate.c if (!ops->target.addr) { target 363 tools/perf/util/annotate.c ops->target.addr = strtoull(c, NULL, 16); target 366 tools/perf/util/annotate.c ops->target.addr = strtoull(ops->raw, NULL, 16); target 369 tools/perf/util/annotate.c target.addr = map__objdump_2mem(map, ops->target.addr); target 373 tools/perf/util/annotate.c ops->target.outside = target.addr < start || target.addr > end; target 393 tools/perf/util/annotate.c if (map_groups__find_ams(&target) == 0 && target 394 tools/perf/util/annotate.c map__rip_2objdump(target.map, map->map_ip(target.map, target.addr)) == ops->target.addr) target 395 tools/perf/util/annotate.c ops->target.sym = target.sym; target 397 tools/perf/util/annotate.c if (!ops->target.outside) { target 398 tools/perf/util/annotate.c ops->target.offset = target.addr - start; target 399 tools/perf/util/annotate.c ops->target.offset_avail = true; target 401 tools/perf/util/annotate.c ops->target.offset_avail = false; target 412 tools/perf/util/annotate.c if (!ops->target.addr || ops->target.offset < 0) target 415 tools/perf/util/annotate.c if (ops->target.outside && ops->target.sym != NULL) target 416 tools/perf/util/annotate.c return scnprintf(bf, size, "%-*s %s", max_ins_name, ins->name, ops->target.sym->name); target 437 tools/perf/util/annotate.c ops->target.offset); target 525 tools/perf/util/annotate.c zfree(&ops->target.raw); target 526 tools/perf/util/annotate.c zfree(&ops->target.name); target 537 tools/perf/util/annotate.c char *s = strchr(ops->raw, ','), *target, *comment, prev; target 549 tools/perf/util/annotate.c target = ++s; target 557 tools/perf/util/annotate.c while (s > target && isspace(s[0])) target 563 tools/perf/util/annotate.c ops->target.raw = strdup(target); target 566 tools/perf/util/annotate.c if (ops->target.raw == NULL) target 574 tools/perf/util/annotate.c comment__symbol(ops->target.raw, comment + 1, &ops->target.addr, &ops->target.name); target 588 tools/perf/util/annotate.c ops->target.name ?: ops->target.raw); target 598 tools/perf/util/annotate.c char *target, 
*comment, *s, prev; target 600 tools/perf/util/annotate.c target = s = ops->raw; target 607 tools/perf/util/annotate.c ops->target.raw = strdup(target); target 610 tools/perf/util/annotate.c if (ops->target.raw == NULL) target 618 tools/perf/util/annotate.c comment__symbol(ops->target.raw, comment + 1, &ops->target.addr, &ops->target.name); target 627 tools/perf/util/annotate.c ops->target.name ?: ops->target.raw); target 1549 tools/perf/util/annotate.c dl->ops.target.offset = dl->ops.target.addr - target 1551 tools/perf/util/annotate.c dl->ops.target.offset_avail = true; target 1555 tools/perf/util/annotate.c if (dl->ins.ops && ins__is_call(&dl->ins) && !dl->ops.target.sym) { target 1556 tools/perf/util/annotate.c struct addr_map_symbol target = { target 1558 tools/perf/util/annotate.c .addr = dl->ops.target.addr, target 1561 tools/perf/util/annotate.c if (!map_groups__find_ams(&target) && target 1562 tools/perf/util/annotate.c target.sym->start == target.al_addr) target 1563 tools/perf/util/annotate.c dl->ops.target.sym = target.sym; target 2525 tools/perf/util/annotate.c !disasm_line__has_local_offset(dl) || dl->ops.target.offset < 0 || target 2526 tools/perf/util/annotate.c dl->ops.target.offset >= (s64)symbol__size(sym)) target 2549 tools/perf/util/annotate.c al = notes->offsets[dl->ops.target.offset]; target 2621 tools/perf/util/annotate.c notes->widths.addr = notes->widths.target = target 2631 tools/perf/util/annotate.c notes->widths.target = notes->widths.min_addr; target 2633 tools/perf/util/annotate.c notes->widths.target = notes->widths.max_addr; target 2635 tools/perf/util/annotate.c notes->widths.addr = notes->widths.target; target 2770 tools/perf/util/annotate.c if (dl->ops.target.outside) target 2772 tools/perf/util/annotate.c fwd = dl->ops.target.offset > dl->al.offset; target 2945 tools/perf/util/annotate.c notes->widths.target, addr); target 42 tools/perf/util/annotate.h } target; target 189 tools/perf/util/annotate.h return dl->ops.target.offset_avail && !dl->ops.target.outside; target 289 tools/perf/util/annotate.h u8 target; target 19 tools/perf/util/bpf-event.h struct target; target 139 tools/perf/util/bpf-loader.c pev->target = strdup(value); target 140 tools/perf/util/bpf-loader.c if (!pev->target) target 149 tools/perf/util/bpf-loader.c pev->target = strdup(value); target 150 tools/perf/util/bpf-loader.c if (!pev->target) target 328 tools/perf/util/bpf-prologue.c struct bpf_insn *target; target 340 tools/perf/util/bpf-prologue.c target = error_code; target 343 tools/perf/util/bpf-prologue.c target = success_code; target 346 tools/perf/util/bpf-prologue.c target = user_code; target 353 tools/perf/util/bpf-prologue.c insn->off = target - (insn + 1); target 937 tools/perf/util/evlist.c int perf_evlist__create_maps(struct evlist *evlist, struct target *target) target 939 tools/perf/util/evlist.c bool all_threads = (target->per_thread && target->system_wide); target 961 tools/perf/util/evlist.c threads = thread_map__new_str(target->pid, target->tid, target->uid, target 967 tools/perf/util/evlist.c if (target__uses_dummy_map(target)) target 970 tools/perf/util/evlist.c cpus = perf_cpu_map__new(target->cpu_list); target 975 tools/perf/util/evlist.c evlist->core.has_user_cpus = !!target->cpu_list; target 1270 tools/perf/util/evlist.c int perf_evlist__prepare_workload(struct evlist *evlist, struct target *target, target 1351 tools/perf/util/evlist.c if (target__none(target)) { target 1755 tools/perf/util/evlist.c struct target *target) target 1762 tools/perf/util/evlist.c 
if (perf_evlist__create_maps(evlist, target)) target 115 tools/perf/util/evlist.h struct target *target); target 172 tools/perf/util/evlist.h struct target *target, target 206 tools/perf/util/evlist.h int perf_evlist__create_maps(struct evlist *evlist, struct target *target); target 942 tools/perf/util/evsel.c bool per_cpu = opts->target.default_per_cpu && !opts->target.per_thread; target 1041 tools/perf/util/evsel.c if (target__has_cpu(&opts->target) || opts->sample_cpu) target 1049 tools/perf/util/evsel.c (!opts->no_inherit || target__has_cpu(&opts->target) || per_cpu || target 1112 tools/perf/util/evsel.c if (target__none(&opts->target) && perf_evsel__is_group_leader(evsel) && target 2419 tools/perf/util/evsel.c int perf_evsel__open_strerror(struct evsel *evsel, struct target *target, target 2447 tools/perf/util/evsel.c target->system_wide ? "system-wide " : "", target 2467 tools/perf/util/evsel.c if (target->cpu_list) target 123 tools/perf/util/evsel.h struct target; target 360 tools/perf/util/evsel.h int perf_evsel__open_strerror(struct evsel *evsel, struct target *target, target 17 tools/perf/util/machine.h struct target; target 167 tools/perf/util/probe-event.c struct map *get_target_map(const char *target, struct nsinfo *nsi, bool user) target 173 tools/perf/util/probe-event.c map = dso__new_map(target); target 178 tools/perf/util/probe-event.c return kernel_get_module_map(target); target 355 tools/perf/util/probe-event.c const char *target, struct nsinfo *nsi, target 367 tools/perf/util/probe-event.c map = get_target_map(target, nsi, uprobes); target 411 tools/perf/util/probe-event.c ret = find_alternative_probe_point(dinfo, tmp, &pev->point, pev->target, target 421 tools/perf/util/probe-event.c const char *target, bool user) target 434 tools/perf/util/probe-event.c target, NULL, user); target 828 tools/perf/util/probe-event.c dinfo = open_debuginfo(pev->target, pev->nsi, !need_dwarf); target 856 tools/perf/util/probe-event.c pev->target, pev->uprobes, dinfo); target 1118 tools/perf/util/probe-event.c dinfo = open_debuginfo(pevs->target, pevs->nsi, false); target 1372 tools/perf/util/probe-event.c pev->target = build_id_cache__origname(tmp); target 1375 tools/perf/util/probe-event.c pev->target = strdup_esc(ptr + 1); target 1376 tools/perf/util/probe-event.c if (!pev->target) target 2219 tools/perf/util/probe-event.c zfree(&pev->target); target 2290 tools/perf/util/probe-event.c dst->target = strdup_or_goto(src->target, out_err); target 2808 tools/perf/util/probe-event.c cache = probe_cache__new(pev->target, pev->nsi); target 2893 tools/perf/util/probe-event.c map = get_target_map(pev->target, pev->nsi, pev->uprobes); target 2912 tools/perf/util/probe-event.c pev->target ? : "kernel"); target 2917 tools/perf/util/probe-event.c pev->target ? : "kernel"); target 2923 tools/perf/util/probe-event.c if (!pev->uprobes && !pev->target && target 2963 tools/perf/util/probe-event.c if (!pev->uprobes && !pev->target && target 2977 tools/perf/util/probe-event.c if (pev->target) { target 2979 tools/perf/util/probe-event.c tev->point.module = strdup_or_goto(pev->target, target 2982 tools/perf/util/probe-event.c mod_name = find_module_name(pev->target); target 2984 tools/perf/util/probe-event.c strdup(mod_name ? 
mod_name : pev->target); target 3086 tools/perf/util/probe-event.c if (pev->target) { target 3087 tools/perf/util/probe-event.c tp->module = strdup(pev->target); target 3174 tools/perf/util/probe-event.c const char *target) target 3182 tools/perf/util/probe-event.c cache = probe_cache__new(target, pev->nsi); target 3207 tools/perf/util/probe-event.c if (ntevs > 0 && target && target[0] == '/') target 3267 tools/perf/util/probe-event.c if (!pev->target) target 3270 tools/perf/util/probe-event.c return find_cached_events(pev, tevs, pev->target); target 3272 tools/perf/util/probe-event.c cache = probe_cache__new(pev->target, pev->nsi); target 3286 tools/perf/util/probe-event.c pev->target ? : "kernel"); target 3324 tools/perf/util/probe-event.c ret = convert_exec_to_group(pev->target, &pev->group); target 3500 tools/perf/util/probe-event.c int show_available_funcs(const char *target, struct nsinfo *nsi, target 3512 tools/perf/util/probe-event.c map = get_target_map(target, nsi, user); target 3514 tools/perf/util/probe-event.c pr_err("Failed to get a map for %s\n", (target) ? : "kernel"); target 3527 tools/perf/util/probe-event.c (target) ? : "kernel"); target 97 tools/perf/util/probe-event.h char *target; /* Target binary */ target 192 tools/perf/util/probe-event.h struct map *get_target_map(const char *target, struct nsinfo *nsi, bool user); target 410 tools/perf/util/probe-file.c static int probe_cache__open(struct probe_cache *pcache, const char *target, target 420 tools/perf/util/probe-file.c if (target && build_id_cache__cached(target)) { target 422 tools/perf/util/probe-file.c strlcpy(sbuildid, target, SBUILD_ID_SIZE); target 427 tools/perf/util/probe-file.c if (!target || !strcmp(target, DSO__NAME_KALLSYMS)) { target 428 tools/perf/util/probe-file.c target = DSO__NAME_KALLSYMS; target 433 tools/perf/util/probe-file.c ret = filename__sprintf_build_id(target, sbuildid); target 438 tools/perf/util/probe-file.c pr_debug("Failed to get build-id from %s.\n", target); target 444 tools/perf/util/probe-file.c ret = build_id_cache__add_s(sbuildid, target, nsi, target 447 tools/perf/util/probe-file.c pr_debug("Failed to add build-id cache: %s\n", target); target 452 tools/perf/util/probe-file.c dir_name = build_id_cache__cachedir(sbuildid, target, nsi, is_kallsyms, target 456 tools/perf/util/probe-file.c pr_debug("Failed to get cache from %s\n", target); target 557 tools/perf/util/probe-file.c struct probe_cache *probe_cache__new(const char *target, struct nsinfo *nsi) target 565 tools/perf/util/probe-file.c ret = probe_cache__open(pcache, target, nsi); target 55 tools/perf/util/probe-file.h struct probe_cache *probe_cache__new(const char *target, struct nsinfo *nsi); target 15 tools/perf/util/record.h struct target target; target 698 tools/perf/util/stat-display.c struct target *_target) target 744 tools/perf/util/stat-display.c struct target *_target, target 1010 tools/perf/util/stat-display.c struct target *_target, target 1162 tools/perf/util/stat-display.c struct target *_target, target 466 tools/perf/util/stat.c struct target *target) target 505 tools/perf/util/stat.c if (target__none(target) && !config->initial_delay) target 509 tools/perf/util/stat.c if (target__has_cpu(target) && !target__has_per_thread(target)) target 203 tools/perf/util/stat.h struct target; target 214 tools/perf/util/stat.h struct target *target); target 218 tools/perf/util/stat.h struct target *_target, target 73 tools/perf/util/svghelper.c double target = 10.0; target 78 tools/perf/util/svghelper.c if (size >= 
target) target 79 tools/perf/util/svghelper.c return target; target 80 tools/perf/util/svghelper.c target = target / 2.0; target 2231 tools/perf/util/symbol-elf.c int get_sdt_note_list(struct list_head *head, const char *target) target 2236 tools/perf/util/symbol-elf.c fd = open(target, O_RDONLY); target 263 tools/perf/util/symbol.h int get_sdt_note_list(struct list_head *head, const char *target); target 1471 tools/perf/util/synthetic-events.c struct target *target, struct perf_thread_map *threads, target 1475 tools/perf/util/synthetic-events.c if (target__has_task(target)) target 1477 tools/perf/util/synthetic-events.c else if (target__has_cpu(target)) target 1485 tools/perf/util/synthetic-events.c int machine__synthesize_threads(struct machine *machine, struct target *target, target 1489 tools/perf/util/synthetic-events.c return __machine__synthesize_threads(machine, NULL, target, threads, target 25 tools/perf/util/synthetic-events.h struct target; target 65 tools/perf/util/synthetic-events.h struct target *target, struct perf_thread_map *threads, target 68 tools/perf/util/synthetic-events.h int machine__synthesize_threads(struct machine *machine, struct target *target, target 17 tools/perf/util/target.c enum target_errno target__validate(struct target *target) target 21 tools/perf/util/target.c if (target->pid) target 22 tools/perf/util/target.c target->tid = target->pid; target 25 tools/perf/util/target.c if (target->tid && target->cpu_list) { target 26 tools/perf/util/target.c target->cpu_list = NULL; target 32 tools/perf/util/target.c if (target->tid && target->uid_str) { target 33 tools/perf/util/target.c target->uid_str = NULL; target 39 tools/perf/util/target.c if (target->uid_str && target->cpu_list) { target 40 tools/perf/util/target.c target->cpu_list = NULL; target 46 tools/perf/util/target.c if (target->tid && target->system_wide) { target 47 tools/perf/util/target.c target->system_wide = false; target 53 tools/perf/util/target.c if (target->uid_str && target->system_wide) { target 54 tools/perf/util/target.c target->system_wide = false; target 60 tools/perf/util/target.c if (target->per_thread && (target->system_wide || target->cpu_list)) { target 61 tools/perf/util/target.c target->per_thread = false; target 69 tools/perf/util/target.c enum target_errno target__parse_uid(struct target *target) target 73 tools/perf/util/target.c const char *str = target->uid_str; target 75 tools/perf/util/target.c target->uid = UINT_MAX; target 98 tools/perf/util/target.c target->uid = result->pw_uid; target 116 tools/perf/util/target.c int target__strerror(struct target *target, int errnum, target 143 tools/perf/util/target.c snprintf(buf, buflen, msg, target->uid_str); target 47 tools/perf/util/target.h enum target_errno target__validate(struct target *target); target 48 tools/perf/util/target.h enum target_errno target__parse_uid(struct target *target); target 50 tools/perf/util/target.h int target__strerror(struct target *target, int errnum, char *buf, size_t buflen); target 52 tools/perf/util/target.h static inline bool target__has_task(struct target *target) target 54 tools/perf/util/target.h return target->tid || target->pid || target->uid_str; target 57 tools/perf/util/target.h static inline bool target__has_cpu(struct target *target) target 59 tools/perf/util/target.h return target->system_wide || target->cpu_list; target 62 tools/perf/util/target.h static inline bool target__none(struct target *target) target 64 tools/perf/util/target.h return !target__has_task(target) && 
!target__has_cpu(target); target 67 tools/perf/util/target.h static inline bool target__has_per_thread(struct target *target) target 69 tools/perf/util/target.h return target->system_wide && target->per_thread; target 72 tools/perf/util/target.h static inline bool target__uses_dummy_map(struct target *target) target 76 tools/perf/util/target.h if (target->default_per_cpu) target 77 tools/perf/util/target.h use_dummy = target->per_thread ? true : false; target 78 tools/perf/util/target.h else if (target__has_task(target) || target 79 tools/perf/util/target.h (!target__has_cpu(target) && !target->uses_mmap)) target 81 tools/perf/util/target.h else if (target__has_per_thread(target)) target 29 tools/perf/util/top.c struct target *target = &opts->target; target 84 tools/perf/util/top.c if (target->pid) target 86 tools/perf/util/top.c target->pid); target 87 tools/perf/util/top.c else if (target->tid) target 89 tools/perf/util/top.c target->tid); target 90 tools/perf/util/top.c else if (target->uid_str != NULL) target 92 tools/perf/util/top.c target->uid_str); target 96 tools/perf/util/top.c if (target->cpu_list) target 99 tools/perf/util/top.c target->cpu_list); target 101 tools/perf/util/top.c if (target->tid) target 240 tools/testing/selftests/kvm/lib/aarch64/processor.c struct kvm_vcpu_init default_init = { .target = -1, }; target 246 tools/testing/selftests/kvm/lib/aarch64/processor.c if (init->target == -1) { target 249 tools/testing/selftests/kvm/lib/aarch64/processor.c init->target = preferred.target; target 162 tools/testing/selftests/timers/leap-a-day.c struct timespec now, target; target 165 tools/testing/selftests/timers/leap-a-day.c target = timespec_add(now, NSEC_PER_SEC/2); target 166 tools/testing/selftests/timers/leap-a-day.c clock_nanosleep(CLOCK_REALTIME, TIMER_ABSTIME, &target, NULL); target 169 tools/testing/selftests/timers/leap-a-day.c if (!in_order(target, now)) { target 63 tools/testing/selftests/timers/mqueue-lat.c struct timespec start, end, now, target; target 81 tools/testing/selftests/timers/mqueue-lat.c target = now; target 82 tools/testing/selftests/timers/mqueue-lat.c target = timespec_add(now, TARGET_TIMEOUT); /* 100ms */ target 84 tools/testing/selftests/timers/mqueue-lat.c ret = mq_timedreceive(q, buf, sizeof(buf), NULL, &target); target 103 tools/testing/selftests/timers/nanosleep.c struct timespec now, target, rel; target 108 tools/testing/selftests/timers/nanosleep.c target = timespec_add(now, ns); target 110 tools/testing/selftests/timers/nanosleep.c if (clock_nanosleep(clockid, TIMER_ABSTIME, &target, NULL)) target 114 tools/testing/selftests/timers/nanosleep.c if (!in_order(target, now)) target 122 tools/testing/selftests/timers/nanosleep.c target = timespec_add(now, ns); target 126 tools/testing/selftests/timers/nanosleep.c if (!in_order(target, now)) target 100 tools/testing/selftests/timers/nsleep-lat.c struct timespec start, end, target; target 104 tools/testing/selftests/timers/nsleep-lat.c target.tv_sec = ns/NSEC_PER_SEC; target 105 tools/testing/selftests/timers/nsleep-lat.c target.tv_nsec = ns%NSEC_PER_SEC; target 109 tools/testing/selftests/timers/nsleep-lat.c if (clock_nanosleep(clockid, 0, &target, NULL)) target 117 tools/testing/selftests/timers/nsleep-lat.c clock_nanosleep(clockid, 0, &target, NULL); target 128 tools/testing/selftests/timers/nsleep-lat.c target = timespec_add(start, ns); target 129 tools/testing/selftests/timers/nsleep-lat.c clock_nanosleep(clockid, TIMER_ABSTIME, &target, NULL); target 131 
tools/testing/selftests/timers/nsleep-lat.c latency += timespec_sub(target, end); target 103 usr/gen_init_cpio.c static int cpio_mkslink(const char *name, const char *target, target 119 usr/gen_init_cpio.c (unsigned)strlen(target)+1, /* filesize */ target 129 usr/gen_init_cpio.c push_string(target); target 137 usr/gen_init_cpio.c char target[PATH_MAX + 1]; target 143 usr/gen_init_cpio.c if (5 != sscanf(line, "%" str(PATH_MAX) "s %" str(PATH_MAX) "s %o %d %d", name, target, &mode, &uid, &gid)) { target 147 usr/gen_init_cpio.c rc = cpio_mkslink(name, target, mode, uid, gid); target 344 virt/kvm/arm/arm.c vcpu->arch.target = -1; target 651 virt/kvm/arm/arm.c return vcpu->arch.target >= 0; target 967 virt/kvm/arm/arm.c if (init->target != phys_target) target 974 virt/kvm/arm/arm.c if (vcpu->arch.target != -1 && vcpu->arch.target != init->target) target 988 virt/kvm/arm/arm.c if (vcpu->arch.target != -1 && i < KVM_VCPU_MAX_FEATURES && target 996 virt/kvm/arm/arm.c vcpu->arch.target = phys_target; target 1001 virt/kvm/arm/arm.c vcpu->arch.target = -1; target 184 virt/kvm/arm/vgic/vgic-mmio-v2.c int target; target 189 virt/kvm/arm/vgic/vgic-mmio-v2.c target = irq->targets ? __ffs(irq->targets) : 0; target 190 virt/kvm/arm/vgic/vgic-mmio-v2.c irq->target_vcpu = kvm_get_vcpu(vcpu->kvm, target); target 2574 virt/kvm/kvm_main.c int kvm_vcpu_yield_to(struct kvm_vcpu *target) target 2581 virt/kvm/kvm_main.c pid = rcu_dereference(target->pid);
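Editorial note on the timers selftest hits near the end of the list (leap-a-day, mqueue-lat, nanosleep, nsleep-lat): they all build an absolute deadline with timespec_add() and then sleep or wait until it with TIMER_ABSTIME, checking afterwards that the clock did not come back early. A minimal sketch of that pattern follows; timespec_add() and in_order() are reimplemented here as assumed stand-ins for the selftests' helpers, not copies of them.

#define _POSIX_C_SOURCE 200809L
#include <stdio.h>
#include <time.h>

#define NSEC_PER_SEC 1000000000L

/* Assumed stand-in for the selftests' timespec_add(): add ns and normalize. */
static struct timespec timespec_add(struct timespec ts, long ns)
{
	ts.tv_nsec += ns;
	while (ts.tv_nsec >= NSEC_PER_SEC) {
		ts.tv_nsec -= NSEC_PER_SEC;
		ts.tv_sec++;
	}
	return ts;
}

/* Returns nonzero if a <= b, mirroring the selftests' in_order() check. */
static int in_order(struct timespec a, struct timespec b)
{
	if (a.tv_sec != b.tv_sec)
		return a.tv_sec < b.tv_sec;
	return a.tv_nsec <= b.tv_nsec;
}

int main(void)
{
	struct timespec now, target;

	clock_gettime(CLOCK_MONOTONIC, &now);
	target = timespec_add(now, NSEC_PER_SEC / 10);	/* 100ms deadline */

	/* Sleep until the absolute target rather than for a relative delay. */
	clock_nanosleep(CLOCK_MONOTONIC, TIMER_ABSTIME, &target, NULL);

	clock_gettime(CLOCK_MONOTONIC, &now);
	if (!in_order(target, now)) {
		fprintf(stderr, "woke before target\n");
		return 1;
	}
	printf("slept past target as expected\n");
	return 0;
}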