cpl 109 arch/ia64/include/asm/processor.h __u64 cpl : 2;
cpl 91 arch/ia64/include/asm/ptrace.h # define user_mode(regs) (((struct ia64_psr *) &(regs)->cr_ipsr)->cpl != 0)
cpl 59 arch/ia64/kernel/brl_emu.c unsigned long opcode, btype, qp, offset, cpl;
cpl 161 arch/ia64/kernel/brl_emu.c cpl = ia64_psr(regs)->cpl;
cpl 163 arch/ia64/kernel/brl_emu.c | (ar_ec << 52) | (cpl << 62));
cpl 534 arch/ia64/kernel/mca_drv.c if (psr1->cpl != 0 ||
cpl 535 arch/ia64/kernel/mca_drv.c ((psr2->cpl != 0) && mca_recover_range(pmsa->pmsa_iip))) {
cpl 551 arch/ia64/kernel/mca_drv.c psr2->cpl = 0;
cpl 522 arch/ia64/kernel/traps.c ia64_psr(&regs)->cpl = 3;
cpl 56 arch/mips/ath25/ar2315.c static irqreturn_t ar2315_ahb_err_handler(int cpl, void *dev_id)
cpl 57 arch/mips/ath25/ar5312.c static irqreturn_t ar5312_ahb_err_handler(int cpl, void *dev_id)
cpl 115 arch/mips/cavium-octeon/oct_ilm.c static irqreturn_t cvm_oct_ciu_timer_interrupt(int cpl, void *dev_id)
cpl 88 arch/mips/loongson64/lemote-2f/irq.c static irqreturn_t ip6_action(int cpl, void *dev_id)
cpl 350 arch/mips/pci/msi-octeon.c static irqreturn_t octeon_msi_interrupt##x(int cpl, void *dev_id) \
cpl 200 arch/powerpc/platforms/powermac/pic.c static irqreturn_t gatwick_action(int cpl, void *dev_id)
cpl 207 arch/x86/include/asm/kvm_emulate.h int (*cpl)(struct x86_emulate_ctxt *ctxt);
cpl 173 arch/x86/include/asm/svm.h u8 cpl;
cpl 310 arch/x86/include/asm/xen/interface.h uint8_t cpl;
cpl 1638 arch/x86/kvm/emulate.c u16 selector, int seg, u8 cpl,
cpl 1683 arch/x86/kvm/emulate.c if (ctxt->mode != X86EMUL_MODE_PROT64 || rpl != cpl)
cpl 1693 arch/x86/kvm/emulate.c seg_desc.dpl = cpl;
cpl 1730 arch/x86/kvm/emulate.c if (rpl != cpl || (seg_desc.type & 0xa) != 0x2 || dpl != cpl)
cpl 1739 arch/x86/kvm/emulate.c if (dpl > cpl)
cpl 1743 arch/x86/kvm/emulate.c if (rpl > cpl || dpl != cpl)
cpl 1756 arch/x86/kvm/emulate.c selector = (selector & 0xfffc) | cpl;
cpl 1780 arch/x86/kvm/emulate.c (rpl > dpl && cpl > dpl)))
cpl 1814 arch/x86/kvm/emulate.c u8 cpl = ctxt->ops->cpl(ctxt);
cpl 1830 arch/x86/kvm/emulate.c return __load_segment_descriptor(ctxt, selector, seg, cpl,
cpl 1924 arch/x86/kvm/emulate.c int cpl = ctxt->ops->cpl(ctxt);
cpl 1939 arch/x86/kvm/emulate.c if (cpl == 0)
cpl 1941 arch/x86/kvm/emulate.c if (cpl <= iopl)
cpl 2227 arch/x86/kvm/emulate.c u8 cpl = ctxt->ops->cpl(ctxt);
cpl 2231 arch/x86/kvm/emulate.c rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
cpl 2301 arch/x86/kvm/emulate.c int cpl = ctxt->ops->cpl(ctxt);
cpl 2311 arch/x86/kvm/emulate.c if (ctxt->mode >= X86EMUL_MODE_PROT16 && (cs & 3) > cpl)
cpl 2313 arch/x86/kvm/emulate.c rc = __load_segment_descriptor(ctxt, (u16)cs, VCPU_SREG_CS, cpl,
cpl 2964 arch/x86/kvm/emulate.c return ctxt->ops->cpl(ctxt) > iopl;
cpl 3075 arch/x86/kvm/emulate.c u8 cpl;
cpl 3098 arch/x86/kvm/emulate.c cpl = tss->cs & 3;
cpl 3104 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl,
cpl 3108 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
cpl 3112 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
cpl 3116 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
cpl 3120 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
cpl 3190 arch/x86/kvm/emulate.c u8 cpl;
cpl 3227 arch/x86/kvm/emulate.c cpl = 3;
cpl 3230 arch/x86/kvm/emulate.c cpl = tss->cs & 3;
cpl 3238 arch/x86/kvm/emulate.c cpl, X86_TRANSFER_TASK_SWITCH, NULL);
cpl 3241 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->es, VCPU_SREG_ES, cpl,
cpl 3245 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->cs, VCPU_SREG_CS, cpl,
cpl 3249 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->ss, VCPU_SREG_SS, cpl,
cpl 3253 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->ds, VCPU_SREG_DS, cpl,
cpl 3257 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->fs, VCPU_SREG_FS, cpl,
cpl 3261 arch/x86/kvm/emulate.c ret = __load_segment_descriptor(ctxt, tss->gs, VCPU_SREG_GS, cpl,
cpl 3350 arch/x86/kvm/emulate.c if ((tss_selector & 3) > dpl || ops->cpl(ctxt) > dpl)
cpl 3535 arch/x86/kvm/emulate.c int cpl = ctxt->ops->cpl(ctxt);
cpl 3542 arch/x86/kvm/emulate.c rc = __load_segment_descriptor(ctxt, sel, VCPU_SREG_CS, cpl,
cpl 3748 arch/x86/kvm/emulate.c ctxt->ops->cpl(ctxt) > 0)
cpl 3852 arch/x86/kvm/emulate.c ctxt->ops->cpl(ctxt) > 0)
cpl 3915 arch/x86/kvm/emulate.c ctxt->ops->cpl(ctxt) > 0)
cpl 3998 arch/x86/kvm/emulate.c ctxt->ops->cpl(ctxt)) {
cpl 4377 arch/x86/kvm/emulate.c if (cr4 & X86_CR4_TSD && ctxt->ops->cpl(ctxt))
cpl 4395 arch/x86/kvm/emulate.c if ((!(cr4 & X86_CR4_PCE) && ctxt->ops->cpl(ctxt)) ||
cpl 5621 arch/x86/kvm/emulate.c if ((ctxt->d & Priv) && ops->cpl(ctxt)) {
cpl 160 arch/x86/kvm/mmu.h int cpl = kvm_x86_ops->get_cpl(vcpu);
cpl 176 arch/x86/kvm/mmu.h unsigned long smap = (cpl - 3) & (rflags & X86_EFLAGS_AC);
cpl 2522 arch/x86/kvm/svm.c var->dpl = to_svm(vcpu)->vmcb->save.cpl;
cpl 2531 arch/x86/kvm/svm.c return save->cpl;
cpl 2679 arch/x86/kvm/svm.c svm->vmcb->save.cpl = (var->dpl & 3);
cpl 3057 arch/x86/kvm/svm.c if (svm->vmcb->save.cpl) {
cpl 3398 arch/x86/kvm/svm.c nested_vmcb->save.cpl = vmcb->save.cpl;
cpl 3471 arch/x86/kvm/svm.c svm->vmcb->save.cpl = 0;
cpl 3585 arch/x86/kvm/svm.c svm->vmcb->save.cpl = nested_vmcb->save.cpl;
cpl 4938 arch/x86/kvm/svm.c save->cpl, save->efer);
cpl 6304 arch/x86/kvm/x86.c .cpl = emulator_get_cpl,
cpl 443 arch/x86/xen/pmu.c return !!(xenpmu_data->pmu.r.regs.cpl & 3);
cpl 478 arch/x86/xen/pmu.c if (xen_regs->cpl)
cpl 433 drivers/crypto/chelsio/chcr_ipsec.c struct cpl_tx_pkt_core *cpl;
cpl 450 drivers/crypto/chelsio/chcr_ipsec.c cpl = (struct cpl_tx_pkt_core *)pos;
cpl 460 drivers/crypto/chelsio/chcr_ipsec.c cpl->ctrl0 = htonl(ctrl0);
cpl 461 drivers/crypto/chelsio/chcr_ipsec.c cpl->pack = htons(0);
cpl 462 drivers/crypto/chelsio/chcr_ipsec.c cpl->len = htons(skb->len);
cpl 463 drivers/crypto/chelsio/chcr_ipsec.c cpl->ctrl1 = cpu_to_be64(cntrl);
cpl 135 drivers/gpu/drm/pl111/pl111_display.c u32 cpl, tim2;
cpl 157 drivers/gpu/drm/pl111/pl111_display.c cpl = mode->hdisplay - 1;
cpl 230 drivers/gpu/drm/pl111/pl111_display.c tim2 |= cpl << 16;
cpl 518 drivers/infiniband/hw/cxgb4/cm.c int cpl)
cpl 523 drivers/infiniband/hw/cxgb4/cm.c rpl->ot.opcode = cpl;
cpl 3799 drivers/infiniband/hw/cxgb4/cm.c struct cpl_pass_accept_req *cpl;
cpl 3810 drivers/infiniband/hw/cxgb4/cm.c cpl = (struct cpl_pass_accept_req *)cplhdr(rpl_skb);
cpl 3811 drivers/infiniband/hw/cxgb4/cm.c OPCODE_TID(cpl) = htonl(MK_OPCODE_TID(CPL_PASS_ACCEPT_REQ,
cpl 3930 drivers/infiniband/hw/cxgb4/cm.c struct cpl_rx_pkt *cpl = cplhdr(skb);
cpl 3938 drivers/infiniband/hw/cxgb4/cm.c vlantag = cpl->vlan;
cpl 3939 drivers/infiniband/hw/cxgb4/cm.c len = cpl->len;
cpl 3940 drivers/infiniband/hw/cxgb4/cm.c l2info = cpl->l2info;
cpl 3941 drivers/infiniband/hw/cxgb4/cm.c hdr_len = cpl->hdr_len;
cpl 3942 drivers/infiniband/hw/cxgb4/cm.c intf = cpl->iff;
cpl 4000 drivers/infiniband/hw/cxgb4/cm.c struct cpl_pass_accept_req *cpl = cplhdr(skb);
cpl 4019 drivers/infiniband/hw/cxgb4/cm.c FW_OFLD_CONNECTION_WR_RCV_SCALE_V(cpl->tcpopt.wsf) |
cpl 4021 drivers/infiniband/hw/cxgb4/cm.c PASS_OPEN_TID_G(ntohl(cpl->tos_stid))));
cpl 4064 drivers/infiniband/hw/cxgb4/cm.c struct cpl_rx_pkt *cpl = (void *)skb->data;
cpl 4077 drivers/infiniband/hw/cxgb4/cm.c if (!(cpl->l2info & cpu_to_be32(RXF_SYN_F)))
cpl 4101 drivers/infiniband/hw/cxgb4/cm.c eth_hdr_len = RX_ETHHDR_LEN_G(be32_to_cpu(cpl->l2info));
cpl 4104 drivers/infiniband/hw/cxgb4/cm.c eth_hdr_len = RX_T5_ETHHDR_LEN_G(be32_to_cpu(cpl->l2info));
cpl 4107 drivers/infiniband/hw/cxgb4/cm.c eth_hdr_len = RX_T6_ETHHDR_LEN_G(be32_to_cpu(cpl->l2info));
cpl 4121 drivers/infiniband/hw/cxgb4/cm.c __vlan_hwaccel_put_tag(skb, htons(ETH_P_8021Q), ntohs(cpl->vlan));
cpl 57 drivers/input/serio/ct82c710.c static irqreturn_t ct82c710_interrupt(int cpl, void *dev_id)
cpl 251 drivers/misc/apds990x.c u32 cpl;
cpl 277 drivers/misc/apds990x.c cpl = ((u32)chip->atime * (u32)again[chip->again_next] *
cpl 280 drivers/misc/apds990x.c thres = lux * cpl / 64;
cpl 665 drivers/net/ethernet/cavium/octeon/octeon_mgmt.c static irqreturn_t octeon_mgmt_interrupt(int cpl, void *dev_id)
cpl 1775 drivers/net/ethernet/chelsio/cxgb/sge.c struct cpl_tx_pkt *cpl;
cpl 1811 drivers/net/ethernet/chelsio/cxgb/sge.c cpl = (struct cpl_tx_pkt *)hdr;
cpl 1851 drivers/net/ethernet/chelsio/cxgb/sge.c cpl = __skb_push(skb, sizeof(*cpl));
cpl 1852 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->opcode = CPL_TX_PKT;
cpl 1853 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->ip_csum_dis = 1; /* SW calculates IP csum */
cpl 1854 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->l4_csum_dis = skb->ip_summed == CHECKSUM_PARTIAL ? 0 : 1;
cpl 1859 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->iff = dev->if_port;
cpl 1862 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->vlan_valid = 1;
cpl 1863 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->vlan = htons(skb_vlan_tag_get(skb));
cpl 1866 drivers/net/ethernet/chelsio/cxgb/sge.c cpl->vlan_valid = 0;
cpl 1188 drivers/net/ethernet/chelsio/cxgb3/sge.c struct cpl_tx_pkt *cpl = (struct cpl_tx_pkt *)d;
cpl 1190 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl->len = htonl(skb->len);
cpl 1199 drivers/net/ethernet/chelsio/cxgb3/sge.c struct cpl_tx_pkt_lso *hdr = (struct cpl_tx_pkt_lso *)cpl;
cpl 1215 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl->cntrl = htonl(cntrl);
cpl 1217 drivers/net/ethernet/chelsio/cxgb3/sge.c if (skb->len <= WR_LEN - sizeof(*cpl)) {
cpl 1226 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl->wr.wr_hi = htonl(V_WR_BCNTLFLT(skb->len & 7) |
cpl 1230 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl->wr.wr_lo = htonl(V_WR_LEN(flits) | V_WR_GEN(gen) |
cpl 2134 drivers/net/ethernet/chelsio/cxgb3/sge.c struct cpl_rx_pkt *cpl;
cpl 2170 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl = qs->lro_va = sd->pg_chunk.va + 2;
cpl 2173 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl->csum_valid && cpl->csum == htons(0xffff)) {
cpl 2179 drivers/net/ethernet/chelsio/cxgb3/sge.c cpl = qs->lro_va;
cpl 2198 drivers/net/ethernet/chelsio/cxgb3/sge.c if (cpl->vlan_valid) {
cpl 2200 drivers/net/ethernet/chelsio/cxgb3/sge.c __vlan_hwaccel_put_tag(skb, htons(ETH_P_8021Q), ntohs(cpl->vlan));
cpl 65 drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.h #define INIT_TP_WR_CPL(w, cpl, tid) do { \
cpl 67 drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.h OPCODE_TID(w) = htonl(MK_OPCODE_TID(cpl, tid)); \
cpl 1373 drivers/net/ethernet/chelsio/cxgb4/sge.c struct cpl_tx_pkt_core *cpl;
cpl 1487 drivers/net/ethernet/chelsio/cxgb4/sge.c len += sizeof(*cpl);
cpl 1506 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl = (void *)(tnl_lso + 1);
cpl 1532 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl = (void *)(lso + 1);
cpl 1544 drivers/net/ethernet/chelsio/cxgb4/sge.c sgl = (u64 *)(cpl + 1); /* sgl start here */
cpl 1566 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl = (void *)(wr + 1);
cpl 1567 drivers/net/ethernet/chelsio/cxgb4/sge.c sgl = (u64 *)(cpl + 1);
cpl 1595 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->ctrl0 = htonl(ctrl0);
cpl 1596 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->pack = htons(0);
cpl 1597 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->len = htons(skb->len);
cpl 1598 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->ctrl1 = cpu_to_be64(cntrl);
cpl 1711 drivers/net/ethernet/chelsio/cxgb4/sge.c struct cpl_tx_pkt_core *cpl;
cpl 1822 drivers/net/ethernet/chelsio/cxgb4/sge.c sizeof(*cpl)));
cpl 1843 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl = (void *)(lso + 1);
cpl 1859 drivers/net/ethernet/chelsio/cxgb4/sge.c ? skb->len + sizeof(*cpl)
cpl 1860 drivers/net/ethernet/chelsio/cxgb4/sge.c : sizeof(*cpl));
cpl 1868 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl = (void *)(wr + 1);
cpl 1887 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->ctrl0 = cpu_to_be32(TXPKT_OPCODE_V(CPL_TX_PKT_XT) |
cpl 1890 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->pack = cpu_to_be16(0);
cpl 1891 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->len = cpu_to_be16(skb->len);
cpl 1892 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl->ctrl1 = cpu_to_be64(cntrl);
cpl 1901 drivers/net/ethernet/chelsio/cxgb4/sge.c cxgb4_inline_tx_skb(skb, &txq->q, cpl + 1);
cpl 1940 drivers/net/ethernet/chelsio/cxgb4/sge.c struct ulptx_sgl *sgl = (struct ulptx_sgl *)(cpl + 1);
cpl 2802 drivers/net/ethernet/chelsio/cxgb4/sge.c struct cpl_rx_mps_pkt *cpl = NULL;
cpl 2806 drivers/net/ethernet/chelsio/cxgb4/sge.c cpl = (struct cpl_rx_mps_pkt *)skb->data;
cpl 2807 drivers/net/ethernet/chelsio/cxgb4/sge.c if (!(CPL_RX_MPS_PKT_TYPE_G(ntohl(cpl->op_to_r1_hi)) &
cpl 2811 drivers/net/ethernet/chelsio/cxgb4/sge.c data = skb->data + sizeof(*cpl);
cpl 497 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c void *cpl = (void *)(rsp + 1);
cpl 504 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c const struct cpl_fw6_msg *fw_msg = cpl;
cpl 520 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c cpl = (void *)p;
cpl 534 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c const struct cpl_sge_egr_update *p = cpl;
cpl 1167 drivers/net/ethernet/chelsio/cxgb4vf/sge.c struct cpl_tx_pkt_core *cpl;
cpl 1288 drivers/net/ethernet/chelsio/cxgb4vf/sge.c sizeof(*cpl)));
cpl 1312 drivers/net/ethernet/chelsio/cxgb4vf/sge.c cpl = (void *)(lso + 1);
cpl 1327 drivers/net/ethernet/chelsio/cxgb4vf/sge.c len = is_eth_imm(skb) ? skb->len + sizeof(*cpl) : sizeof(*cpl);
cpl 1336 drivers/net/ethernet/chelsio/cxgb4vf/sge.c cpl = (void *)(wr + 1);
cpl 1357 drivers/net/ethernet/chelsio/cxgb4vf/sge.c cpl->ctrl0 = cpu_to_be32(TXPKT_OPCODE_V(CPL_TX_PKT_XT) |
cpl 1360 drivers/net/ethernet/chelsio/cxgb4vf/sge.c cpl->pack = cpu_to_be16(0);
cpl 1361 drivers/net/ethernet/chelsio/cxgb4vf/sge.c cpl->len = cpu_to_be16(skb->len);
cpl 1362 drivers/net/ethernet/chelsio/cxgb4vf/sge.c cpl->ctrl1 = cpu_to_be64(cntrl);
cpl 1379 drivers/net/ethernet/chelsio/cxgb4vf/sge.c inline_tx_skb(skb, &txq->q, cpl + 1);
cpl 1419 drivers/net/ethernet/chelsio/cxgb4vf/sge.c struct ulptx_sgl *sgl = (struct ulptx_sgl *)(cpl + 1);
cpl 716 drivers/pcmcia/pcmcia_resource.c static irqreturn_t test_action(int cpl, void *dev_id)
cpl 1106 drivers/scsi/csiostor/csio_scsi.c struct cpl_fw6_msg *cpl;
cpl 1112 drivers/scsi/csiostor/csio_scsi.c cpl = (struct cpl_fw6_msg *)((uintptr_t)wr + sizeof(__be64));
cpl 1114 drivers/scsi/csiostor/csio_scsi.c if (unlikely(cpl->opcode != CPL_FW6_MSG)) {
cpl 1116 drivers/scsi/csiostor/csio_scsi.c cpl->opcode);
cpl 1121 drivers/scsi/csiostor/csio_scsi.c tempwr = (uint8_t *)(cpl->data);
cpl 1146 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c struct cpl_rx_data *cpl = (struct cpl_rx_data *)skb->data;
cpl 1147 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c unsigned int tid = GET_TID(cpl);
cpl 1167 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c struct cpl_iscsi_hdr *cpl = (struct cpl_iscsi_hdr *)skb->data;
cpl 1168 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c unsigned short pdu_len_ddp = be16_to_cpu(cpl->pdu_len_ddp);
cpl 1169 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c unsigned int tid = GET_TID(cpl);
cpl 1196 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c cxgbi_skcb_tcp_seq(skb) = ntohl(cpl->seq);
cpl 1200 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c __skb_pull(skb, sizeof(*cpl));
cpl 1201 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c __pskb_trim(skb, ntohs(cpl->len));
cpl 1222 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c hlen = ntohs(cpl->len);
cpl 1272 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c struct cpl_iscsi_hdr *cpl = (struct cpl_iscsi_hdr *)skb->data;
cpl 1276 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c u32 tid = GET_TID(cpl);
cpl 1277 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c u16 pdu_len_ddp = be16_to_cpu(cpl->pdu_len_ddp);
cpl 1303 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c cxgbi_skcb_tcp_seq(skb) = be32_to_cpu(cpl->seq);
cpl 1307 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c __skb_pull(skb, sizeof(*cpl));
cpl 1308 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c __pskb_trim(skb, ntohs(cpl->len));
cpl 65 drivers/staging/kpc2000/kpc_dma/fileops.c acd->cpl = &done;
cpl 180 drivers/staging/kpc2000/kpc_dma/fileops.c acd->cpl = NULL;
cpl 232 drivers/staging/kpc2000/kpc_dma/fileops.c if (acd->cpl) {
cpl 233 drivers/staging/kpc2000/kpc_dma/fileops.c complete(acd->cpl);
cpl 87 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h struct completion *cpl;
cpl 106 drivers/staging/octeon/ethernet-spi.c static irqreturn_t cvm_oct_spi_rml_interrupt(int cpl, void *dev_id)
cpl 689 drivers/staging/octeon/ethernet-tx.c static irqreturn_t cvm_oct_tx_cleanup_watchdog(int cpl, void *dev_id)
cpl 1909 drivers/target/iscsi/cxgbit/cxgbit_cm.c struct cpl_rx_data *cpl = cplhdr(skb);
cpl 1910 drivers/target/iscsi/cxgbit/cxgbit_cm.c unsigned int tid = GET_TID(cpl);
cpl 1950 drivers/target/iscsi/cxgbit/cxgbit_cm.c struct cpl_tx_data *cpl = cplhdr(skb);
cpl 1953 drivers/target/iscsi/cxgbit/cxgbit_cm.c unsigned int tid = GET_TID(cpl);
cpl 195 drivers/target/iscsi/cxgbit/cxgbit_main.c struct cpl_rx_iscsi_ddp *cpl = (struct cpl_rx_iscsi_ddp *)(rsp + 1);
cpl 197 drivers/target/iscsi/cxgbit/cxgbit_main.c cxgbit_process_ddpvld(lro_cb->csk, pdu_cb, be32_to_cpu(cpl->ddpvld));
cpl 200 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->ddigest = ntohl(cpl->ulp_crc);
cpl 201 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->pdulen = ntohs(cpl->len);
cpl 243 drivers/target/iscsi/cxgbit/cxgbit_main.c struct cpl_iscsi_hdr *cpl = (struct cpl_iscsi_hdr *)gl->va;
cpl 247 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->seq = ntohl(cpl->seq);
cpl 248 drivers/target/iscsi/cxgbit/cxgbit_main.c len = ntohs(cpl->len);
cpl 258 drivers/target/iscsi/cxgbit/cxgbit_main.c struct cpl_iscsi_data *cpl = (struct cpl_iscsi_data *)gl->va;
cpl 262 drivers/target/iscsi/cxgbit/cxgbit_main.c len = ntohs(cpl->len);
cpl 269 drivers/target/iscsi/cxgbit/cxgbit_main.c struct cpl_rx_iscsi_cmp *cpl;
cpl 271 drivers/target/iscsi/cxgbit/cxgbit_main.c cpl = (struct cpl_rx_iscsi_cmp *)gl->va;
cpl 274 drivers/target/iscsi/cxgbit/cxgbit_main.c len = be16_to_cpu(cpl->len);
cpl 278 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->ddigest = be32_to_cpu(cpl->ulp_crc);
cpl 279 drivers/target/iscsi/cxgbit/cxgbit_main.c pdu_cb->pdulen = ntohs(cpl->len);
cpl 285 drivers/target/iscsi/cxgbit/cxgbit_main.c be32_to_cpu(cpl->ddpvld));
cpl 131 drivers/target/iscsi/cxgbit/cxgbit_target.c struct cpl_tx_data_iso *cpl;
cpl 136 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl = __skb_push(skb, sizeof(*cpl));
cpl 138 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->op_to_scsi = htonl(CPL_TX_DATA_ISO_OP_V(CPL_TX_DATA_ISO) |
cpl 147 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->ahs_len = 0;
cpl 148 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->mpdu = htons(DIV_ROUND_UP(iso_info->mpdu, 4));
cpl 149 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->burst_size = htonl(DIV_ROUND_UP(iso_info->burst_len, 4));
cpl 150 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->len = htonl(iso_info->len);
cpl 151 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->reserved2_seglen_offset = htonl(0);
cpl 152 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->datasn_offset = htonl(0);
cpl 153 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->buffer_offset = htonl(0);
cpl 154 drivers/target/iscsi/cxgbit/cxgbit_target.c cpl->reserved3 = 0;
cpl 156 drivers/target/iscsi/cxgbit/cxgbit_target.c __skb_pull(skb, sizeof(*cpl));
cpl 248 drivers/video/fbdev/uvesafb.c struct completion *cpl = task->done;
cpl 251 drivers/video/fbdev/uvesafb.c task->done = cpl;
cpl 129 drivers/watchdog/octeon-wdt-main.c static irqreturn_t octeon_wdt_poke_irq(int cpl, void *dev_id)
cpl 142 drivers/watchdog/octeon-wdt-main.c disable_irq_nosync(cpl);
cpl 146 include/linux/amba/clcd.h u32 val, cpl;
cpl 170 include/linux/amba/clcd.h cpl = var->xres_virtual;
cpl 174 include/linux/amba/clcd.h cpl = cpl * 8 / 3;
cpl 176 include/linux/amba/clcd.h cpl /= 8;
cpl 178 include/linux/amba/clcd.h cpl /= 4;
cpl 180 include/linux/amba/clcd.h regs->tim2 = val | ((cpl - 1) << 16);
cpl 126 include/linux/interrupt.h extern irqreturn_t no_action(int cpl, void *dev_id);
cpl 44 kernel/irq/handle.c irqreturn_t no_action(int cpl, void *dev_id)
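Most of the arch/x86/kvm/emulate.c hits above share one pattern: the emulator asks its host for the guest's current privilege level through an ops callback (the "int (*cpl)(struct x86_emulate_ctxt *ctxt);" hook in kvm_emulate.h) and refuses privileged work when the result is non-zero, as in "if ((ctxt->d & Priv) && ops->cpl(ctxt))". The user-space sketch below mirrors only that shape; the emu_ctxt, emu_ops, and demo_* names are hypothetical stand-ins, not the kernel's actual types or API.

/*
 * Minimal sketch (not kernel code) of the "ask an ops callback for the
 * CPL, deny if non-zero" pattern seen in the emulate.c entries above.
 * All names here are illustrative stand-ins.
 */
#include <stdio.h>

struct emu_ctxt;

struct emu_ops {
	int (*cpl)(struct emu_ctxt *ctxt);	/* returns current privilege level 0..3 */
};

struct emu_ctxt {
	const struct emu_ops *ops;
	int cur_cpl;				/* stand-in for the guest's CPL state */
};

/* Backend callback: here the CPL is simply cached in the context. */
static int demo_get_cpl(struct emu_ctxt *ctxt)
{
	return ctxt->cur_cpl;
}

/* Deny a privileged operation unless the caller runs at CPL 0. */
static int demo_check_priv(struct emu_ctxt *ctxt)
{
	return ctxt->ops->cpl(ctxt) ? -1 /* would raise #GP */ : 0;
}

int main(void)
{
	static const struct emu_ops ops = { .cpl = demo_get_cpl };
	struct emu_ctxt user = { .ops = &ops, .cur_cpl = 3 };
	struct emu_ctxt kern = { .ops = &ops, .cur_cpl = 0 };

	printf("CPL 3 check: %d, CPL 0 check: %d\n",
	       demo_check_priv(&user), demo_check_priv(&kern));
	return 0;
}

The indirection matters because different backends derive the CPL differently, as the listing itself suggests (kvm_x86_ops->get_cpl in arch/x86/kvm/mmu.h, the SVM backend reading vmcb->save.cpl in arch/x86/kvm/svm.c), while callers only ever see the single cpl() hook.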