msi 234 arch/alpha/include/asm/core_marvel.h unsigned msi : 1; /* 13 */
msi 500 arch/arm/kernel/bios32.c bridge->msi = hw->msi_ctrl;
msi 210 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 214 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 322 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 326 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 432 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 436 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 543 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 547 arch/mips/include/asm/octeon/cvmx-npei-defs.h uint64_t msi:1;
msi 743 arch/mips/include/asm/octeon/cvmx-pci-defs.h uint32_t msi:32;
msi 745 arch/mips/include/asm/octeon/cvmx-pci-defs.h uint32_t msi:32;
msi 370 arch/mips/pci/msi-octeon.c struct irq_chip *msi;
msi 383 arch/mips/pci/msi-octeon.c msi = &octeon_irq_chip_msi_pcie;
msi 394 arch/mips/pci/msi-octeon.c msi = &octeon_irq_chip_msi_pci;
msi 398 arch/mips/pci/msi-octeon.c irq_set_chip_and_handler(irq, msi, handle_simple_irq);
msi 234 arch/powerpc/kvm/mpic.c } msi[MAX_MSI];
msi 956 arch/powerpc/kvm/mpic.c opp->msi[srs].msir |= 1 << ibs;
msi 988 arch/powerpc/kvm/mpic.c r = opp->msi[srs].msir;
msi 990 arch/powerpc/kvm/mpic.c opp->msi[srs].msir = 0;
msi 995 arch/powerpc/kvm/mpic.c r |= (opp->msi[i].msir ? 1 : 0) << i;
msi 1819 arch/powerpc/kvm/mpic.c openpic_msi_write(kvm->arch.mpic, MSIIR_OFFSET, e->msi.data);
msi 1842 arch/powerpc/kvm/mpic.c e->msi.address_lo = ue->u.msi.address_lo;
msi 1843 arch/powerpc/kvm/mpic.c e->msi.address_hi = ue->u.msi.address_hi;
msi 1844 arch/powerpc/kvm/mpic.c e->msi.data = ue->u.msi.data;
msi 128 arch/powerpc/platforms/4xx/msi.c struct resource res, struct ppc4xx_msi *msi)
msi 149 arch/powerpc/platforms/4xx/msi.c msi->msi_dev = of_find_node_by_name(NULL, "ppc4xx-msi");
msi 150 arch/powerpc/platforms/4xx/msi.c if (!msi->msi_dev)
msi 153 arch/powerpc/platforms/4xx/msi.c msi->msi_regs = of_iomap(msi->msi_dev, 0);
msi 154 arch/powerpc/platforms/4xx/msi.c if (!msi->msi_regs) {
msi 160 arch/powerpc/platforms/4xx/msi.c (u32) (msi->msi_regs + PEIH_TERMADH), (u32) (msi->msi_regs));
msi 167 arch/powerpc/platforms/4xx/msi.c msi->msi_addr_hi = upper_32_bits(msi_phys);
msi 168 arch/powerpc/platforms/4xx/msi.c msi->msi_addr_lo = lower_32_bits(msi_phys & 0xffffffff);
msi 170 arch/powerpc/platforms/4xx/msi.c msi->msi_addr_hi, msi->msi_addr_lo);
msi 176 arch/powerpc/platforms/4xx/msi.c out_be32(msi->msi_regs + PEIH_TERMADH, msi->msi_addr_hi);
msi 177 arch/powerpc/platforms/4xx/msi.c out_be32(msi->msi_regs + PEIH_TERMADL, msi->msi_addr_lo);
msi 180 arch/powerpc/platforms/4xx/msi.c out_be32(msi->msi_regs + PEIH_MSIED, *msi_data);
msi 181 arch/powerpc/platforms/4xx/msi.c out_be32(msi->msi_regs + PEIH_MSIMK, *msi_mask);
msi 188 arch/powerpc/platforms/4xx/msi.c iounmap(msi->msi_regs);
msi 190 arch/powerpc/platforms/4xx/msi.c of_node_put(msi->msi_dev);
msi 196 arch/powerpc/platforms/4xx/msi.c struct ppc4xx_msi *msi = dev->dev.platform_data;
msi 201 arch/powerpc/platforms/4xx/msi.c virq = msi->msi_virqs[i];
msi 206 arch/powerpc/platforms/4xx/msi.c if (msi->bitmap.bitmap)
msi 207 arch/powerpc/platforms/4xx/msi.c msi_bitmap_free(&msi->bitmap);
msi 208 arch/powerpc/platforms/4xx/msi.c iounmap(msi->msi_regs);
msi 209 arch/powerpc/platforms/4xx/msi.c of_node_put(msi->msi_dev);
msi 216 arch/powerpc/platforms/4xx/msi.c struct ppc4xx_msi *msi;
msi 223 arch/powerpc/platforms/4xx/msi.c msi = devm_kzalloc(&dev->dev, sizeof(*msi), GFP_KERNEL);
msi 224 arch/powerpc/platforms/4xx/msi.c if (!msi)
msi 226 arch/powerpc/platforms/4xx/msi.c dev->dev.platform_data = msi;
msi 239 arch/powerpc/platforms/4xx/msi.c err = ppc4xx_setup_pcieh_hw(dev, res, msi);
msi 243 arch/powerpc/platforms/4xx/msi.c err = ppc4xx_msi_init_allocator(dev, msi);
msi 248 arch/powerpc/platforms/4xx/msi.c ppc4xx_msi = *msi;
msi 532 arch/powerpc/platforms/4xx/pci.c int big_pim = 0, msi = 0, primary = 0;
msi 553 arch/powerpc/platforms/4xx/pci.c msi = 1;
msi 604 arch/powerpc/platforms/4xx/pci.c ppc4xx_configure_pcix_PIMs(hose, reg, &dma_window, big_pim, msi);
msi 96 arch/powerpc/platforms/cell/axon_msi.c u32 write_offset, msi;
msi 108 arch/powerpc/platforms/cell/axon_msi.c msi = le32_to_cpu(msic->fifo_virt[idx]);
msi 109 arch/powerpc/platforms/cell/axon_msi.c msi &= 0xFFFF;
msi 112 arch/powerpc/platforms/cell/axon_msi.c write_offset, msic->read_offset, msi);
msi 114 arch/powerpc/platforms/cell/axon_msi.c if (msi < nr_irqs && irq_get_chip_data(msi) == msic) {
msi 115 arch/powerpc/platforms/cell/axon_msi.c generic_handle_irq(msi);
msi 126 arch/powerpc/platforms/cell/axon_msi.c pr_devel("axon_msi: invalid irq 0x%x!\n", msi);
msi 132 arch/powerpc/platforms/cell/axon_msi.c msi, retry);
msi 35 arch/powerpc/sysdev/fsl_msi.c #define msi_hwirq(msi, msir_index, intr_index) \
msi 36 arch/powerpc/sysdev/fsl_msi.c ((msir_index) << (msi)->srs_shift | \
msi 37 arch/powerpc/sysdev/fsl_msi.c ((intr_index) << (msi)->ibs_shift))
msi 326 arch/powerpc/sysdev/fsl_msi.c struct fsl_msi *msi = platform_get_drvdata(ofdev);
msi 329 arch/powerpc/sysdev/fsl_msi.c if (msi->list.prev != NULL)
msi 330 arch/powerpc/sysdev/fsl_msi.c list_del(&msi->list);
msi 332 arch/powerpc/sysdev/fsl_msi.c if (msi->cascade_array[i]) {
msi 333 arch/powerpc/sysdev/fsl_msi.c virq = msi->cascade_array[i]->virq;
msi 337 arch/powerpc/sysdev/fsl_msi.c free_irq(virq, msi->cascade_array[i]);
msi 338 arch/powerpc/sysdev/fsl_msi.c kfree(msi->cascade_array[i]);
msi 342 arch/powerpc/sysdev/fsl_msi.c if (msi->bitmap.bitmap)
msi 343 arch/powerpc/sysdev/fsl_msi.c msi_bitmap_free(&msi->bitmap);
msi 344 arch/powerpc/sysdev/fsl_msi.c if ((msi->feature & FSL_PIC_IP_MASK) != FSL_PIC_IP_VMPIC)
msi 345 arch/powerpc/sysdev/fsl_msi.c iounmap(msi->msi_regs);
msi 346 arch/powerpc/sysdev/fsl_msi.c kfree(msi);
msi 354 arch/powerpc/sysdev/fsl_msi.c static int fsl_msi_setup_hwirq(struct fsl_msi *msi, struct platform_device *dev,
msi 375 arch/powerpc/sysdev/fsl_msi.c cascade_data->msi_data = msi;
msi 377 arch/powerpc/sysdev/fsl_msi.c msi->cascade_array[irq_index] = cascade_data;
msi 389 arch/powerpc/sysdev/fsl_msi.c msi_bitmap_free_hwirqs(&msi->bitmap,
msi 390 arch/powerpc/sysdev/fsl_msi.c msi_hwirq(msi, offset, i), 1);
msi 399 arch/powerpc/sysdev/fsl_msi.c struct fsl_msi *msi;
msi 415 arch/powerpc/sysdev/fsl_msi.c msi = kzalloc(sizeof(struct fsl_msi), GFP_KERNEL);
msi 416 arch/powerpc/sysdev/fsl_msi.c if (!msi) {
msi 420 arch/powerpc/sysdev/fsl_msi.c platform_set_drvdata(dev, msi);
msi 422 arch/powerpc/sysdev/fsl_msi.c msi->irqhost = irq_domain_add_linear(dev->dev.of_node,
msi 423 arch/powerpc/sysdev/fsl_msi.c NR_MSI_IRQS_MAX, &fsl_msi_host_ops, msi);
msi 425 arch/powerpc/sysdev/fsl_msi.c if (msi->irqhost == NULL) {
msi 443 arch/powerpc/sysdev/fsl_msi.c msi->msi_regs = ioremap(res.start, resource_size(&res));
msi 444 arch/powerpc/sysdev/fsl_msi.c if (!msi->msi_regs) {
msi 450 arch/powerpc/sysdev/fsl_msi.c msi->msiir_offset =
msi 458 arch/powerpc/sysdev/fsl_msi.c msi->msiir_offset = features->msiir_offset +
msi 461 arch/powerpc/sysdev/fsl_msi.c msi->msiir_offset = msiir.start & MSIIR_OFFSET_MASK;
msi 464 arch/powerpc/sysdev/fsl_msi.c msi->feature = features->fsl_pic_ip;
msi 469 arch/powerpc/sysdev/fsl_msi.c msi->feature |= MSI_HW_ERRATA_ENDIAN;
msi 475 arch/powerpc/sysdev/fsl_msi.c msi->phandle = dev->dev.of_node->phandle;
msi 477 arch/powerpc/sysdev/fsl_msi.c err = fsl_msi_init_allocator(msi);
msi 487 arch/powerpc/sysdev/fsl_msi.c msi->srs_shift = MSIIR1_SRS_SHIFT;
msi 488 arch/powerpc/sysdev/fsl_msi.c msi->ibs_shift = MSIIR1_IBS_SHIFT;
msi 495 arch/powerpc/sysdev/fsl_msi.c err = fsl_msi_setup_hwirq(msi, dev,
msi 504 arch/powerpc/sysdev/fsl_msi.c msi->srs_shift = MSIIR_SRS_SHIFT;
msi 505 arch/powerpc/sysdev/fsl_msi.c msi->ibs_shift = MSIIR_IBS_SHIFT;
msi 533 arch/powerpc/sysdev/fsl_msi.c err = fsl_msi_setup_hwirq(msi, dev, offset + j,
msi 541 arch/powerpc/sysdev/fsl_msi.c list_add_tail(&msi->list, &msi_head);
msi 239 arch/s390/pci/pci_irq.c struct msi_desc *msi;
msi 274 arch/s390/pci/pci_irq.c for_each_pci_msi_entry(msi, pdev) {
msi 280 arch/s390/pci/pci_irq.c msi->affinity : NULL);
msi 283 arch/s390/pci/pci_irq.c rc = irq_set_msi_desc(irq, msi);
msi 291 arch/s390/pci/pci_irq.c msg.address_lo |= msi->affinity ?
msi 292 arch/s390/pci/pci_irq.c (cpumask_first(&msi->affinity->mask) << 8) : 0;
msi 321 arch/s390/pci/pci_irq.c struct msi_desc *msi;
msi 333 arch/s390/pci/pci_irq.c for_each_pci_msi_entry(msi, pdev) {
msi 334 arch/s390/pci/pci_irq.c if (!msi->irq)
msi 336 arch/s390/pci/pci_irq.c if (msi->msi_attrib.is_msix)
msi 337 arch/s390/pci/pci_irq.c __pci_msix_desc_mask_irq(msi, 1);
msi 339 arch/s390/pci/pci_irq.c __pci_msi_desc_mask_irq(msi, 1, 1);
msi 340 arch/s390/pci/pci_irq.c irq_set_msi_desc(msi->irq, NULL);
msi 341 arch/s390/pci/pci_irq.c irq_free_desc(msi->irq);
msi 342 arch/s390/pci/pci_irq.c msi->msg.address_lo = 0;
msi 343 arch/s390/pci/pci_irq.c msi->msg.address_hi = 0;
msi 344 arch/s390/pci/pci_irq.c msi->msg.data = 0;
msi 345 arch/s390/pci/pci_irq.c msi->irq = 0;
msi 158 arch/sparc/kernel/pci_fire.c unsigned long *head, unsigned long *msi)
msi 176 arch/sparc/kernel/pci_fire.c *msi = msi_num = ((ep->word0 & MSIQ_WORD0_DATA0) >>
msi 200 arch/sparc/kernel/pci_fire.c unsigned long msi, int is_msi64)
msi 204 arch/sparc/kernel/pci_fire.c val = upa_readq(pbm->pbm_regs + MSI_MAP(msi));
msi 207 arch/sparc/kernel/pci_fire.c upa_writeq(val, pbm->pbm_regs + MSI_MAP(msi));
msi 209 arch/sparc/kernel/pci_fire.c upa_writeq(MSI_CLEAR_EQWR_N, pbm->pbm_regs + MSI_CLEAR(msi));
msi 211 arch/sparc/kernel/pci_fire.c val = upa_readq(pbm->pbm_regs + MSI_MAP(msi));
msi 213 arch/sparc/kernel/pci_fire.c upa_writeq(val, pbm->pbm_regs + MSI_MAP(msi));
msi 218 arch/sparc/kernel/pci_fire.c static int pci_fire_msi_teardown(struct pci_pbm_info *pbm, unsigned long msi)
msi 222 arch/sparc/kernel/pci_fire.c val = upa_readq(pbm->pbm_regs + MSI_MAP(msi));
msi 226 arch/sparc/kernel/pci_fire.c upa_writeq(val, pbm->pbm_regs + MSI_MAP(msi));
msi 40 arch/sparc/kernel/pci_impl.h unsigned long *head, unsigned long *msi);
msi 44 arch/sparc/kernel/pci_impl.h unsigned long msi, int is_msi64);
msi 45 arch/sparc/kernel/pci_impl.h int (*msi_teardown)(struct pci_pbm_info *pbm, unsigned long msi);
msi 30 arch/sparc/kernel/pci_msi.c unsigned long msi;
msi 32 arch/sparc/kernel/pci_msi.c err = ops->dequeue_msi(pbm, msiqid, &head, &msi);
msi 36 arch/sparc/kernel/pci_msi.c irq = pbm->msi_irq_table[msi - pbm->msi_first];
msi 129 arch/sparc/kernel/pci_msi.c int msi, err;
msi 144 arch/sparc/kernel/pci_msi.c msi = err;
msi 148 arch/sparc/kernel/pci_msi.c err = ops->msi_setup(pbm, msiqid, msi,
msi 153 arch/sparc/kernel/pci_msi.c pbm->msi_irq_table[msi - pbm->msi_first] = *irq_p;
msi 162 arch/sparc/kernel/pci_msi.c msg.data = msi;
msi 170 arch/sparc/kernel/pci_msi.c free_msi(pbm, msi);
msi 979 arch/sparc/kernel/pci_sun4v.c unsigned long *msi)
msi 998 arch/sparc/kernel/pci_sun4v.c *msi = ep->msi_data;
msi 1030 arch/sparc/kernel/pci_sun4v.c unsigned long msi, int is_msi64)
msi 1032 arch/sparc/kernel/pci_sun4v.c if (pci_sun4v_msi_setmsiq(pbm->devhandle, msi, msiqid,
msi 1036 arch/sparc/kernel/pci_sun4v.c if (pci_sun4v_msi_setstate(pbm->devhandle, msi, HV_MSISTATE_IDLE))
msi 1038 arch/sparc/kernel/pci_sun4v.c if (pci_sun4v_msi_setvalid(pbm->devhandle, msi, HV_MSIVALID_VALID))
msi 1043 arch/sparc/kernel/pci_sun4v.c static int pci_sun4v_msi_teardown(struct pci_pbm_info *pbm, unsigned long msi)
msi 1047 arch/sparc/kernel/pci_sun4v.c err = pci_sun4v_msi_getmsiq(pbm->devhandle, msi, &msiqid);
msi 1051 arch/sparc/kernel/pci_sun4v.c pci_sun4v_msi_setvalid(pbm->devhandle, msi, HV_MSIVALID_INVALID);
msi 107 arch/x86/kvm/irq_comm.c trace_kvm_msi_set_irq(e->msi.address_lo | (kvm->arch.x2apic_format ?
msi 108 arch/x86/kvm/irq_comm.c (u64)e->msi.address_hi << 32 : 0),
msi 109 arch/x86/kvm/irq_comm.c e->msi.data);
msi 111 arch/x86/kvm/irq_comm.c irq->dest_id = (e->msi.address_lo &
msi 114 arch/x86/kvm/irq_comm.c irq->dest_id |= MSI_ADDR_EXT_DEST_ID(e->msi.address_hi);
msi 115 arch/x86/kvm/irq_comm.c irq->vector = (e->msi.data &
msi 117 arch/x86/kvm/irq_comm.c irq->dest_mode = (1 << MSI_ADDR_DEST_MODE_SHIFT) & e->msi.address_lo;
msi 118 arch/x86/kvm/irq_comm.c irq->trig_mode = (1 << MSI_DATA_TRIGGER_SHIFT) & e->msi.data;
msi 119 arch/x86/kvm/irq_comm.c irq->delivery_mode = e->msi.data & 0x700;
msi 120 arch/x86/kvm/irq_comm.c irq->msi_redir_hint = ((e->msi.address_lo
msi 130 arch/x86/kvm/irq_comm.c return kvm->arch.x2apic_format && (e->msi.address_hi & 0xff);
msi 305 arch/x86/kvm/irq_comm.c e->msi.address_lo = ue->u.msi.address_lo;
msi 306 arch/x86/kvm/irq_comm.c e->msi.address_hi = ue->u.msi.address_hi;
msi 307 arch/x86/kvm/irq_comm.c e->msi.data = ue->u.msi.data;
msi 71 drivers/ata/sata_mv.c static int msi;
msi 72 drivers/ata/sata_mv.c module_param(msi, int, S_IRUGO);
msi 73 drivers/ata/sata_mv.c MODULE_PARM_DESC(msi, "Enable use of PCI MSI (0=off, 1=on)");
msi 4426 drivers/ata/sata_mv.c if (msi && pci_enable_msi(pdev) == 0)
msi 2479 drivers/ata/sata_nv.c module_param_named(msi, msi_enabled, bool, 0444);
msi 2480 drivers/ata/sata_nv.c MODULE_PARM_DESC(msi, "Enable use of MSI (Default: false)");
msi 414 drivers/ata/sata_sil24.c module_param_named(msi, sata_sil24_msi, bool, S_IRUGO);
msi 415 drivers/ata/sata_sil24.c MODULE_PARM_DESC(msi, "Enable MSI (Default: false)");
msi 279 drivers/crypto/hisilicon/qm.c u32 msi);
msi 982 drivers/crypto/hisilicon/qm.c u32 msi)
msi 991 drivers/crypto/hisilicon/qm.c u32 msi)
msi 993 drivers/crypto/hisilicon/qm.c u32 irq_enable = ce | nfe | fe | msi;
msi 997 drivers/crypto/hisilicon/qm.c qm->msi_mask = msi;
msi 1006 drivers/crypto/hisilicon/qm.c writel(msi, qm->io_base + QM_RAS_MSI_INT_SEL);
msi 1862 drivers/crypto/hisilicon/qm.c u32 msi)
msi 1870 drivers/crypto/hisilicon/qm.c qm->ops->hw_error_init(qm, ce, nfe, fe, msi);
msi 211 drivers/crypto/hisilicon/qm.h u32 msi);
msi 701 drivers/dma/dw-edma/dw-edma-core.c memcpy(&chan->msi, &irq->msi, sizeof(chan->msi));
msi 705 drivers/dma/dw-edma/dw-edma-core.c chan->msi.address_hi, chan->msi.address_lo,
msi 706 drivers/dma/dw-edma/dw-edma-core.c chan->msi.data);
msi 793 drivers/dma/dw-edma/dw-edma-core.c &dw->irq[0].msi);
msi 819 drivers/dma/dw-edma/dw-edma-core.c &dw->irq[i].msi);
msi 90 drivers/dma/dw-edma/dw-edma-core.h struct msi_msg msi;
msi 100 drivers/dma/dw-edma/dw-edma-core.h struct msi_msg msi;
msi 276 drivers/dma/dw-edma/dw-edma-v0-core.c SET_RW(dw, chan->dir, done_imwr_low, chan->msi.address_lo);
msi 277 drivers/dma/dw-edma/dw-edma-v0-core.c SET_RW(dw, chan->dir, done_imwr_high, chan->msi.address_hi);
msi 279 drivers/dma/dw-edma/dw-edma-v0-core.c SET_RW(dw, chan->dir, abort_imwr_low, chan->msi.address_lo);
msi 280 drivers/dma/dw-edma/dw-edma-v0-core.c SET_RW(dw, chan->dir, abort_imwr_high, chan->msi.address_hi);
msi 308 drivers/dma/dw-edma/dw-edma-v0-core.c chan->msi.data);
msi 313 drivers/dma/dw-edma/dw-edma-v0-core.c chan->msi.data);
msi 416 drivers/dma/ioat/init.c goto msi;
msi 430 drivers/dma/ioat/init.c goto msi;
msi 444 drivers/dma/ioat/init.c goto msi;
msi 451 drivers/dma/ioat/init.c msi:
msi 763 drivers/dma/qcom/hidma.c bool msi;
msi 831 drivers/dma/qcom/hidma.c msi = hidma_test_capability(&pdev->dev, HIDMA_MSI_CAP);
msi 867 drivers/dma/qcom/hidma.c if (msi)
msi 870 drivers/dma/qcom/hidma.c if (!msi || rc) {
msi 897 drivers/dma/qcom/hidma.c if (msi)
msi 148 drivers/dma/qcom/hidma.h void hidma_ll_setup_irq(struct hidma_lldev *lldev, bool msi);
msi 687 drivers/dma/qcom/hidma_ll.c void hidma_ll_setup_irq(struct hidma_lldev *lldev, bool msi)
msi 691 drivers/dma/qcom/hidma_ll.c lldev->msi_support = msi;
msi 238 drivers/gpu/drm/amd/amdgpu/amdgpu_drv.c MODULE_PARM_DESC(msi, "MSI support (1 = enable, 0 = disable, -1 = auto)");
msi 239 drivers/gpu/drm/amd/amdgpu/amdgpu_drv.c module_param_named(msi, amdgpu_msi, int, 0444);
msi 33 drivers/gpu/drm/nouveau/include/nvkm/subdev/pci.h bool msi;
msi 79 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c if (pci->msi)
msi 147 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c if (pci->msi)
msi 169 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c if (pci->msi)
msi 213 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c pci->msi = true;
msi 219 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c pci->msi = false;
msi 222 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c pci->msi = nvkm_boolopt(device->cfgopt, "NvMSI", pci->msi);
msi 223 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c if (pci->msi && func->msi_rearm) {
msi 224 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c pci->msi = pci_enable_msi(pci->pdev) == 0;
msi 225 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c if (pci->msi)
msi 228 drivers/gpu/drm/nouveau/nvkm/subdev/pci/base.c pci->msi = false;
msi 256 drivers/gpu/drm/radeon/radeon_drv.c MODULE_PARM_DESC(msi, "MSI support (1 = enable, 0 = disable, -1 = auto)");
msi 257 drivers/gpu/drm/radeon/radeon_drv.c module_param_named(msi, radeon_msi, int, 0444);
msi 133 drivers/iommu/dma-iommu.c struct iommu_dma_msi_page *msi, *tmp;
msi 141 drivers/iommu/dma-iommu.c list_for_each_entry_safe(msi, tmp, &cookie->msi_page_list, list) {
msi 142 drivers/iommu/dma-iommu.c list_del(&msi->list);
msi 143 drivers/iommu/dma-iommu.c kfree(msi);
msi 812 drivers/iommu/virtio-iommu.c struct iommu_resv_region *entry, *new_entry, *msi = NULL;
msi 819 drivers/iommu/virtio-iommu.c msi = entry;
msi 831 drivers/iommu/virtio-iommu.c if (!msi) {
msi 832 drivers/iommu/virtio-iommu.c msi = iommu_alloc_resv_region(MSI_IOVA_BASE, MSI_IOVA_LENGTH,
msi 834 drivers/iommu/virtio-iommu.c if (!msi)
msi 837 drivers/iommu/virtio-iommu.c list_add_tail(&msi->list, head);
msi 35 drivers/irqchip/irq-gic-v3-its-pci-msi.c int msi, msix, *count = data;
msi 37 drivers/irqchip/irq-gic-v3-its-pci-msi.c msi = max(pci_msi_vec_count(pdev), 0);
msi 39 drivers/irqchip/irq-gic-v3-its-pci-msi.c *count += max(msi, msix);
msi 46 drivers/media/pci/ddbridge/ddbridge-main.c static int msi = 1;
msi 48 drivers/media/pci/ddbridge/ddbridge-main.c static int msi;
msi 50 drivers/media/pci/ddbridge/ddbridge-main.c module_param(msi, int, 0444);
msi 52 drivers/media/pci/ddbridge/ddbridge-main.c MODULE_PARM_DESC(msi, "Control MSI interrupts: 0-disable, 1-enable (default)");
msi 54 drivers/media/pci/ddbridge/ddbridge-main.c MODULE_PARM_DESC(msi, "Control MSI interrupts: 0-disable (default), 1-enable");
msi 71 drivers/media/pci/ddbridge/ddbridge-main.c if (dev->msi)
msi 79 drivers/media/pci/ddbridge/ddbridge-main.c if (dev->msi == 2)
msi 107 drivers/media/pci/ddbridge/ddbridge-main.c if (msi && pci_msi_enabled()) {
msi 111 drivers/media/pci/ddbridge/ddbridge-main.c dev->msi = stat;
msi 113 drivers/media/pci/ddbridge/ddbridge-main.c dev->msi);
msi 138 drivers/media/pci/ddbridge/ddbridge-main.c if (dev->msi)
msi 140 drivers/media/pci/ddbridge/ddbridge-main.c if (dev->msi == 2) {
msi 162 drivers/media/pci/ddbridge/ddbridge-main.c if (dev->msi == 2) {
msi 324 drivers/media/pci/ddbridge/ddbridge.h int msi;
msi 1252 drivers/media/pci/saa7164/saa7164-core.c dev->msi = true;
msi 1464 drivers/media/pci/saa7164/saa7164-core.c if (dev->msi) {
msi 1466 drivers/media/pci/saa7164/saa7164-core.c dev->msi = false;
msi 455 drivers/media/pci/saa7164/saa7164.h bool msi;
msi 375 drivers/net/ethernet/broadcom/bnx2.c ((unsigned long) bnapi->status_blk.msi +
msi 877 drivers/net/ethernet/broadcom/bnx2.c if (bnapi->status_blk.msi)
msi 878 drivers/net/ethernet/broadcom/bnx2.c bnapi->status_blk.msi = NULL;
msi 888 drivers/net/ethernet/broadcom/bnx2.c bnapi->status_blk.msi = bp->status_blk;
msi 890 drivers/net/ethernet/broadcom/bnx2.c &bnapi->status_blk.msi->status_tx_quick_consumer_index0;
msi 892 drivers/net/ethernet/broadcom/bnx2.c &bnapi->status_blk.msi->status_rx_quick_consumer_index0;
msi 2804 drivers/net/ethernet/broadcom/bnx2.c struct status_block *sblk = bnapi->status_blk.msi;
msi 3321 drivers/net/ethernet/broadcom/bnx2.c prefetch(bnapi->status_blk.msi);
msi 3341 drivers/net/ethernet/broadcom/bnx2.c prefetch(bnapi->status_blk.msi);
msi 3357 drivers/net/ethernet/broadcom/bnx2.c struct status_block *sblk = bnapi->status_blk.msi;
msi 3409 drivers/net/ethernet/broadcom/bnx2.c struct status_block *sblk = bnapi->status_blk.msi;
msi 3460 drivers/net/ethernet/broadcom/bnx2.c bnapi->status_blk.msi);
msi 3467 drivers/net/ethernet/broadcom/bnx2.c struct status_block *sblk = bnapi->status_blk.msi;
msi 3532 drivers/net/ethernet/broadcom/bnx2.c struct status_block *sblk = bnapi->status_blk.msi;
msi 5013 drivers/net/ethernet/broadcom/bnx2.c memset(bp->bnx2_napi[0].status_blk.msi, 0, bp->status_stats_size);
msi 6768 drivers/net/ethernet/broadcom/bnx2.h struct status_block *msi;
msi 1551 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bool msi = (bp->flags & USING_MSI_FLAG) ? true : false;
msi 1560 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c } else if (msi) {
msi 1586 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c (msix ? "MSI-X" : (msi ? "MSI" : "INTx")));
msi 1614 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c bool msi = (bp->flags & USING_MSI_FLAG) ? true : false;
msi 1626 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c } else if (msi) {
msi 1647 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c val, (msix ? "MSI-X" : (msi ? "MSI" : "INTx")));
msi 125 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c static int msi = 2;
msi 127 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c module_param(msi, int, 0644);
msi 128 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c MODULE_PARM_DESC(msi, "whether to use MSI or MSI-X");
msi 3376 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c if (msi > 1 && cxgb_enable_msix(adapter) == 0)
msi 3378 drivers/net/ethernet/chelsio/cxgb3/cxgb3_main.c else if (msi > 0 && pci_enable_msi(pdev) == 0)
msi 152 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c static int msi = 2;
msi 154 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c module_param(msi, int, 0644);
msi 155 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c MODULE_PARM_DESC(msi, "whether to use INTx (0), MSI (1) or MSI-X (2)");
msi 6025 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c if (msi > 1 && enable_msix(adapter) == 0)
msi 6027 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c else if (msi > 0 && pci_enable_msi(pdev) == 0) {
msi 6029 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c if (msi > 1)
msi 89 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c static int msi = MSI_DEFAULT;
msi 91 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c module_param(msi, int, 0644);
msi 92 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c MODULE_PARM_DESC(msi, "whether to use MSI-X or MSI");
msi 369 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c int qs, msi;
msi 371 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c for (qs = 0, msi = MSIX_IQFLINT; qs < pi->nqsets; qs++, msi++) {
msi 372 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c snprintf(adapter->msix_info[msi].desc, namelen,
msi 374 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c adapter->msix_info[msi].desc[namelen] = 0;
msi 385 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c int rxq, msi, err;
msi 398 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c msi = MSIX_IQFLINT;
msi 400 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c err = request_irq(adapter->msix_info[msi].vec,
msi 402 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c adapter->msix_info[msi].desc,
msi 406 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c msi++;
msi 412 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c free_irq(adapter->msix_info[--msi].vec, &s->ethrxq[rxq].rspq);
msi 423 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c int rxq, msi;
msi 426 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c msi = MSIX_IQFLINT;
msi 428 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c free_irq(adapter->msix_info[msi++].vec,
msi 2556 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c ethqsets = vfres->niqflint - 1 - (msi == MSI_MSI);
msi 3193 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c if (msi == MSI_MSIX && enable_msix(adapter) == 0)
msi 3196 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c if (msi == MSI_MSIX) {
msi 3205 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c msi = MSI_MSI;
msi 3478 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c if (msi != MSI_MSIX && msi != MSI_MSI) {
msi 3480 drivers/net/ethernet/chelsio/cxgb4vf/cxgb4vf_main.c msi, MSI_MSIX, MSI_MSI);
msi 246 drivers/net/ethernet/neterion/s2io-regs.h #define RX_MAT_SET(ring, msi) vBIT(msi, (8 * ring), 8)
msi 251 drivers/net/ethernet/neterion/s2io-regs.h #define TX_MAT_SET(fifo, msi) vBIT(msi, (8 * fifo), 8)
msi 893 drivers/net/ethernet/nvidia/forcedeth.c static int msi = NV_MSI_INT_ENABLED;
msi 5931 drivers/net/ethernet/nvidia/forcedeth.c if ((id->driver_data & DEV_HAS_MSI) && msi)
msi 6445 drivers/net/ethernet/nvidia/forcedeth.c module_param(msi, int, 0);
msi 6446 drivers/net/ethernet/nvidia/forcedeth.c MODULE_PARM_DESC(msi, "MSI interrupts are enabled by setting to 1 and disabled by setting to 0.");
msi 64 drivers/net/ethernet/qlogic/qla3xxx.c static int msi;
msi 65 drivers/net/ethernet/qlogic/qla3xxx.c module_param(msi, int, 0);
msi 66 drivers/net/ethernet/qlogic/qla3xxx.c MODULE_PARM_DESC(msi, "Turn on Message Signaled Interrupts.");
msi 3429 drivers/net/ethernet/qlogic/qla3xxx.c if (qdev->msi && test_bit(QL_MSI_ENABLED, &qdev->flags)) {
msi 3475 drivers/net/ethernet/qlogic/qla3xxx.c if (qdev->msi) {
msi 3479 drivers/net/ethernet/qlogic/qla3xxx.c qdev->msi = 0;
msi 3528 drivers/net/ethernet/qlogic/qla3xxx.c if (qdev->msi && test_bit(QL_MSI_ENABLED, &qdev->flags)) {
msi 3817 drivers/net/ethernet/qlogic/qla3xxx.c if (msi)
msi 3818 drivers/net/ethernet/qlogic/qla3xxx.c qdev->msi = 1;
msi 1086 drivers/net/ethernet/qlogic/qla3xxx.h u32 msi;
msi 371 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/sw.c module_param_named(msi, rtl88ee_mod_params.msi_support, bool, 0444);
msi 379 drivers/net/wireless/realtek/rtlwifi/rtl8188ee/sw.c MODULE_PARM_DESC(msi, "Set to 1 to use MSI interrupts mode (default 1)\n");
msi 358 drivers/net/wireless/realtek/rtlwifi/rtl8192ee/sw.c module_param_named(msi, rtl92ee_mod_params.msi_support, bool, 0444);
msi 367 drivers/net/wireless/realtek/rtlwifi/rtl8192ee/sw.c MODULE_PARM_DESC(msi, "Set to 1 to use MSI interrupts mode (default 1)\n");
msi 366 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/sw.c module_param_named(msi, rtl8723e_mod_params.msi_support, bool, 0444);
msi 374 drivers/net/wireless/realtek/rtlwifi/rtl8723ae/sw.c MODULE_PARM_DESC(msi, "Set to 1 to use MSI interrupts mode (default 0)\n");
msi 372 drivers/net/wireless/realtek/rtlwifi/rtl8723be/sw.c module_param_named(msi, rtl8723be_mod_params.msi_support, bool, 0444);
msi 381 drivers/net/wireless/realtek/rtlwifi/rtl8723be/sw.c MODULE_PARM_DESC(msi, "Set to 1 to use MSI interrupts mode (default 0)\n");
msi 422 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/sw.c module_param_named(msi, rtl8821ae_mod_params.msi_support, bool, 0444);
msi 431 drivers/net/wireless/realtek/rtlwifi/rtl8821ae/sw.c MODULE_PARM_DESC(msi, "Set to 1 to use MSI interrupts mode (default 1)\n");
msi 46 drivers/ntb/msi.c struct_size = sizeof(*ntb->msi) + sizeof(*ntb->msi->peer_mws) * peers;
msi 48 drivers/ntb/msi.c ntb->msi = devm_kzalloc(&ntb->dev, struct_size, GFP_KERNEL);
msi 49 drivers/ntb/msi.c if (!ntb->msi)
msi 52 drivers/ntb/msi.c ntb->msi->desc_changed = desc_changed;
msi 62 drivers/ntb/msi.c ntb->msi->peer_mws[i] = devm_ioremap(&ntb->dev, mw_phys_addr,
msi 64 drivers/ntb/msi.c if (!ntb->msi->peer_mws[i]) {
msi 74 drivers/ntb/msi.c if (ntb->msi->peer_mws[i])
msi 75 drivers/ntb/msi.c devm_iounmap(&ntb->dev, ntb->msi->peer_mws[i]);
msi 77 drivers/ntb/msi.c devm_kfree(&ntb->dev, ntb->msi);
msi 78 drivers/ntb/msi.c ntb->msi = NULL;
msi 108 drivers/ntb/msi.c if (!ntb->msi)
msi 150 drivers/ntb/msi.c ntb->msi->base_addr = addr;
msi 151 drivers/ntb/msi.c ntb->msi->end_addr = addr + mw_min_size;
msi 203 drivers/ntb/msi.c if (addr < ntb->msi->base_addr || addr >= ntb->msi->end_addr) {
msi 206 drivers/ntb/msi.c entry->irq, addr, ntb->msi->base_addr,
msi 207 drivers/ntb/msi.c ntb->msi->end_addr);
msi 211 drivers/ntb/msi.c msi_desc->addr_offset = addr - ntb->msi->base_addr;
msi 223 drivers/ntb/msi.c if (dr->ntb->msi->desc_changed)
msi 224 drivers/ntb/msi.c dr->ntb->msi->desc_changed(dr->ntb->ctx);
msi 288 drivers/ntb/msi.c if (!ntb->msi)
msi 368 drivers/ntb/msi.c if (!ntb->msi)
msi 371 drivers/ntb/msi.c idx = desc->addr_offset / sizeof(*ntb->msi->peer_mws[peer]);
msi 373 drivers/ntb/msi.c iowrite32(desc->data, &ntb->msi->peer_mws[peer][idx]);
msi 453 drivers/pci/controller/pci-ftpci100.c host->msi = NULL;
msi 1143 drivers/pci/controller/pci-hyperv.c struct msi_desc *msi = irq_data_get_msi_desc(irq_data);
msi 1145 drivers/pci/controller/pci-hyperv.c pdev = msi_desc_to_pci_dev(msi);
msi 1804 drivers/pci/controller/pci-hyperv.c hbus->pci_bus->msi = &hbus->msi_chip;
msi 1805 drivers/pci/controller/pci-hyperv.c hbus->pci_bus->msi->dev = &hbus->hdev->device;
msi 73 drivers/pci/controller/pci-mvebu.c struct msi_controller *msi;
msi 1129 drivers/pci/controller/pci-mvebu.c bridge->msi = pcie->msi;
msi 381 drivers/pci/controller/pci-tegra.c struct tegra_msi msi;
msi 1614 drivers/pci/controller/pci-tegra.c int msi;
msi 1618 drivers/pci/controller/pci-tegra.c msi = find_first_zero_bit(chip->used, INT_PCI_MSI_NR);
msi 1619 drivers/pci/controller/pci-tegra.c if (msi < INT_PCI_MSI_NR)
msi 1620 drivers/pci/controller/pci-tegra.c set_bit(msi, chip->used);
msi 1622 drivers/pci/controller/pci-tegra.c msi = -ENOSPC;
msi 1626 drivers/pci/controller/pci-tegra.c return msi;
msi 1647 drivers/pci/controller/pci-tegra.c struct tegra_msi *msi = &pcie->msi;
msi 1661 drivers/pci/controller/pci-tegra.c irq = irq_find_mapping(msi->domain, index);
msi 1663 drivers/pci/controller/pci-tegra.c if (test_bit(index, msi->used))
msi 1688 drivers/pci/controller/pci-tegra.c struct tegra_msi *msi = to_tegra_msi(chip);
msi 1693 drivers/pci/controller/pci-tegra.c hwirq = tegra_msi_alloc(msi);
msi 1697 drivers/pci/controller/pci-tegra.c irq = irq_create_mapping(msi->domain, hwirq);
msi 1699 drivers/pci/controller/pci-tegra.c tegra_msi_free(msi, hwirq);
msi 1705 drivers/pci/controller/pci-tegra.c msg.address_lo = lower_32_bits(msi->phys);
msi 1706 drivers/pci/controller/pci-tegra.c msg.address_hi = upper_32_bits(msi->phys);
msi 1717 drivers/pci/controller/pci-tegra.c struct tegra_msi *msi = to_tegra_msi(chip);
msi 1722 drivers/pci/controller/pci-tegra.c tegra_msi_free(msi, hwirq);
msi 1752 drivers/pci/controller/pci-tegra.c struct tegra_msi *msi = &pcie->msi;
msi 1756 drivers/pci/controller/pci-tegra.c mutex_init(&msi->lock);
msi 1758 drivers/pci/controller/pci-tegra.c msi->chip.dev = dev;
msi 1759 drivers/pci/controller/pci-tegra.c msi->chip.setup_irq = tegra_msi_setup_irq;
msi 1760 drivers/pci/controller/pci-tegra.c msi->chip.teardown_irq = tegra_msi_teardown_irq;
msi 1762 drivers/pci/controller/pci-tegra.c msi->domain = irq_domain_add_linear(dev->of_node, INT_PCI_MSI_NR,
msi 1763 drivers/pci/controller/pci-tegra.c &msi_domain_ops, &msi->chip);
msi 1764 drivers/pci/controller/pci-tegra.c if (!msi->domain) {
msi 1775 drivers/pci/controller/pci-tegra.c msi->irq = err;
msi 1777 drivers/pci/controller/pci-tegra.c err = request_irq(msi->irq, tegra_pcie_msi_irq, IRQF_NO_THREAD,
msi 1795 drivers/pci/controller/pci-tegra.c msi->virt = dma_alloc_attrs(dev, PAGE_SIZE, &msi->phys, GFP_KERNEL,
msi 1797 drivers/pci/controller/pci-tegra.c if (!msi->virt) {
msi 1803 drivers/pci/controller/pci-tegra.c host->msi = &msi->chip;
msi 1808 drivers/pci/controller/pci-tegra.c free_irq(msi->irq, pcie);
msi 1810 drivers/pci/controller/pci-tegra.c irq_domain_remove(msi->domain);
msi 1817 drivers/pci/controller/pci-tegra.c struct tegra_msi *msi = &pcie->msi;
msi 1820 drivers/pci/controller/pci-tegra.c afi_writel(pcie, msi->phys >> soc->msi_base_shift, AFI_MSI_FPCI_BAR_ST);
msi 1821 drivers/pci/controller/pci-tegra.c afi_writel(pcie, msi->phys, AFI_MSI_AXI_BAR_ST);
msi 1843 drivers/pci/controller/pci-tegra.c struct tegra_msi *msi = &pcie->msi;
msi 1846 drivers/pci/controller/pci-tegra.c dma_free_attrs(pcie->dev, PAGE_SIZE, msi->virt, msi->phys,
msi 1849 drivers/pci/controller/pci-tegra.c if (msi->irq > 0)
msi 1850 drivers/pci/controller/pci-tegra.c free_irq(msi->irq, pcie);
msi 1853 drivers/pci/controller/pci-tegra.c irq = irq_find_mapping(msi->domain, i);
msi 1858 drivers/pci/controller/pci-tegra.c irq_domain_remove(msi->domain);
msi 753 drivers/pci/controller/pci-v3-semi.c host->msi = NULL;
msi 27 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi;
msi 94 drivers/pci/controller/pci-xgene-msi.c static u32 xgene_msi_ir_read(struct xgene_msi *msi,
msi 97 drivers/pci/controller/pci-xgene-msi.c return readl_relaxed(msi->msi_regs + MSI_IR0 +
msi 102 drivers/pci/controller/pci-xgene-msi.c static u32 xgene_msi_int_read(struct xgene_msi *msi, u32 msi_grp)
msi 104 drivers/pci/controller/pci-xgene-msi.c return readl_relaxed(msi->msi_regs + MSI_INT0 + (msi_grp << 16));
msi 143 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi = irq_data_get_irq_chip_data(data);
msi 146 drivers/pci/controller/pci-xgene-msi.c u64 target_addr = msi->msi_addr + (((8 * group) + reg_set) << 16);
msi 197 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi = domain->host_data;
msi 200 drivers/pci/controller/pci-xgene-msi.c mutex_lock(&msi->bitmap_lock);
msi 202 drivers/pci/controller/pci-xgene-msi.c msi_irq = bitmap_find_next_zero_area(msi->bitmap, NR_MSI_VEC, 0,
msi 203 drivers/pci/controller/pci-xgene-msi.c msi->num_cpus, 0);
msi 205 drivers/pci/controller/pci-xgene-msi.c bitmap_set(msi->bitmap, msi_irq, msi->num_cpus);
msi 209 drivers/pci/controller/pci-xgene-msi.c mutex_unlock(&msi->bitmap_lock);
msi 225 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi = irq_data_get_irq_chip_data(d);
msi 228 drivers/pci/controller/pci-xgene-msi.c mutex_lock(&msi->bitmap_lock);
msi 231 drivers/pci/controller/pci-xgene-msi.c bitmap_clear(msi->bitmap, hwirq, msi->num_cpus);
msi 233 drivers/pci/controller/pci-xgene-msi.c mutex_unlock(&msi->bitmap_lock);
msi 243 drivers/pci/controller/pci-xgene-msi.c static int xgene_allocate_domains(struct xgene_msi *msi)
msi 245 drivers/pci/controller/pci-xgene-msi.c msi->inner_domain = irq_domain_add_linear(NULL, NR_MSI_VEC,
msi 246 drivers/pci/controller/pci-xgene-msi.c &msi_domain_ops, msi);
msi 247 drivers/pci/controller/pci-xgene-msi.c if (!msi->inner_domain)
msi 250 drivers/pci/controller/pci-xgene-msi.c msi->msi_domain = pci_msi_create_irq_domain(of_node_to_fwnode(msi->node),
msi 252 drivers/pci/controller/pci-xgene-msi.c msi->inner_domain);
msi 254 drivers/pci/controller/pci-xgene-msi.c if (!msi->msi_domain) {
msi 255 drivers/pci/controller/pci-xgene-msi.c irq_domain_remove(msi->inner_domain);
msi 262 drivers/pci/controller/pci-xgene-msi.c static void xgene_free_domains(struct xgene_msi *msi)
msi 264 drivers/pci/controller/pci-xgene-msi.c if (msi->msi_domain)
msi 265 drivers/pci/controller/pci-xgene-msi.c irq_domain_remove(msi->msi_domain);
msi 266 drivers/pci/controller/pci-xgene-msi.c if (msi->inner_domain)
msi 267 drivers/pci/controller/pci-xgene-msi.c irq_domain_remove(msi->inner_domain);
msi 301 drivers/pci/controller/pci-xgene-msi.c xgene_msi = msi_groups->msi;
msi 358 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi = platform_get_drvdata(pdev);
msi 364 drivers/pci/controller/pci-xgene-msi.c kfree(msi->msi_groups);
msi 366 drivers/pci/controller/pci-xgene-msi.c kfree(msi->bitmap);
msi 367 drivers/pci/controller/pci-xgene-msi.c msi->bitmap = NULL;
msi 369 drivers/pci/controller/pci-xgene-msi.c xgene_free_domains(msi);
msi 376 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi = &xgene_msi_ctrl;
msi 382 drivers/pci/controller/pci-xgene-msi.c for (i = cpu; i < NR_HW_IRQS; i += msi->num_cpus) {
msi 383 drivers/pci/controller/pci-xgene-msi.c msi_group = &msi->msi_groups[i];
msi 423 drivers/pci/controller/pci-xgene-msi.c struct xgene_msi *msi = &xgene_msi_ctrl;
msi 427 drivers/pci/controller/pci-xgene-msi.c for (i = cpu; i < NR_HW_IRQS; i += msi->num_cpus) {
msi 428 drivers/pci/controller/pci-xgene-msi.c msi_group = &msi->msi_groups[i];
msi 488 drivers/pci/controller/pci-xgene-msi.c xgene_msi->msi_groups[irq_index].msi = xgene_msi;
msi 41 drivers/pci/controller/pcie-altera-msi.c static inline void msi_writel(struct altera_msi *msi, const u32 value,
msi 44 drivers/pci/controller/pcie-altera-msi.c writel_relaxed(value, msi->csr_base + reg);
msi 47 drivers/pci/controller/pcie-altera-msi.c static inline u32 msi_readl(struct altera_msi *msi, const u32 reg)
msi 49 drivers/pci/controller/pcie-altera-msi.c return readl_relaxed(msi->csr_base + reg);
msi 55 drivers/pci/controller/pcie-altera-msi.c struct altera_msi *msi;
msi 61 drivers/pci/controller/pcie-altera-msi.c msi = irq_desc_get_handler_data(desc);
msi 63 drivers/pci/controller/pcie-altera-msi.c while ((status = msi_readl(msi, MSI_STATUS)) != 0) {
msi 64 drivers/pci/controller/pcie-altera-msi.c for_each_set_bit(bit, &status, msi->num_of_vectors) {
msi 66 drivers/pci/controller/pcie-altera-msi.c readl_relaxed(msi->vector_base + (bit * sizeof(u32)));
msi 68 drivers/pci/controller/pcie-altera-msi.c virq = irq_find_mapping(msi->inner_domain, bit);
msi 72 drivers/pci/controller/pcie-altera-msi.c dev_err(&msi->pdev->dev, "unexpected MSI\n");
msi 93 drivers/pci/controller/pcie-altera-msi.c struct altera_msi *msi = irq_data_get_irq_chip_data(data);
msi 94 drivers/pci/controller/pcie-altera-msi.c phys_addr_t addr = msi->vector_phy + (data->hwirq * sizeof(u32));
msi 100 drivers/pci/controller/pcie-altera-msi.c dev_dbg(&msi->pdev->dev, "msi#%d address_hi %#x address_lo %#x\n",
msi 119 drivers/pci/controller/pcie-altera-msi.c struct altera_msi *msi = domain->host_data;
msi 124 drivers/pci/controller/pcie-altera-msi.c mutex_lock(&msi->lock);
msi 126 drivers/pci/controller/pcie-altera-msi.c bit = find_first_zero_bit(msi->used, msi->num_of_vectors);
msi 127 drivers/pci/controller/pcie-altera-msi.c if (bit >= msi->num_of_vectors) {
msi 128 drivers/pci/controller/pcie-altera-msi.c mutex_unlock(&msi->lock);
msi 132 drivers/pci/controller/pcie-altera-msi.c set_bit(bit, msi->used);
msi 134 drivers/pci/controller/pcie-altera-msi.c mutex_unlock(&msi->lock);
msi 140 drivers/pci/controller/pcie-altera-msi.c mask = msi_readl(msi, MSI_INTMASK);
msi 142 drivers/pci/controller/pcie-altera-msi.c msi_writel(msi, mask, MSI_INTMASK);
msi 151 drivers/pci/controller/pcie-altera-msi.c struct altera_msi *msi = irq_data_get_irq_chip_data(d);
msi 154 drivers/pci/controller/pcie-altera-msi.c mutex_lock(&msi->lock);
msi 156 drivers/pci/controller/pcie-altera-msi.c if (!test_bit(d->hwirq, msi->used)) {
msi 157 drivers/pci/controller/pcie-altera-msi.c dev_err(&msi->pdev->dev, "trying to free unused MSI#%lu\n",
msi 160 drivers/pci/controller/pcie-altera-msi.c __clear_bit(d->hwirq, msi->used);
msi 161 drivers/pci/controller/pcie-altera-msi.c mask = msi_readl(msi, MSI_INTMASK);
msi 163 drivers/pci/controller/pcie-altera-msi.c msi_writel(msi, mask, MSI_INTMASK);
msi 166 drivers/pci/controller/pcie-altera-msi.c mutex_unlock(&msi->lock);
msi 174 drivers/pci/controller/pcie-altera-msi.c static int altera_allocate_domains(struct altera_msi *msi)
msi 176 drivers/pci/controller/pcie-altera-msi.c struct fwnode_handle *fwnode = of_node_to_fwnode(msi->pdev->dev.of_node);
msi 178 drivers/pci/controller/pcie-altera-msi.c msi->inner_domain = irq_domain_add_linear(NULL, msi->num_of_vectors,
msi 179 drivers/pci/controller/pcie-altera-msi.c &msi_domain_ops, msi);
msi 180 drivers/pci/controller/pcie-altera-msi.c if (!msi->inner_domain) {
msi 181 drivers/pci/controller/pcie-altera-msi.c dev_err(&msi->pdev->dev, "failed to create IRQ domain\n");
msi 185 drivers/pci/controller/pcie-altera-msi.c msi->msi_domain = pci_msi_create_irq_domain(fwnode,
msi 186 drivers/pci/controller/pcie-altera-msi.c &altera_msi_domain_info, msi->inner_domain);
msi 187 drivers/pci/controller/pcie-altera-msi.c if (!msi->msi_domain) {
msi 188 drivers/pci/controller/pcie-altera-msi.c dev_err(&msi->pdev->dev, "failed to create MSI domain\n");
msi 189 drivers/pci/controller/pcie-altera-msi.c irq_domain_remove(msi->inner_domain);
msi 196 drivers/pci/controller/pcie-altera-msi.c static void altera_free_domains(struct altera_msi *msi)
msi 198 drivers/pci/controller/pcie-altera-msi.c irq_domain_remove(msi->msi_domain);
msi 199 drivers/pci/controller/pcie-altera-msi.c irq_domain_remove(msi->inner_domain);
msi 204 drivers/pci/controller/pcie-altera-msi.c struct altera_msi *msi = platform_get_drvdata(pdev);
msi 206 drivers/pci/controller/pcie-altera-msi.c msi_writel(msi, 0, MSI_INTMASK);
msi 207 drivers/pci/controller/pcie-altera-msi.c irq_set_chained_handler(msi->irq, NULL);
msi 208 drivers/pci/controller/pcie-altera-msi.c irq_set_handler_data(msi->irq, NULL);
msi 210 drivers/pci/controller/pcie-altera-msi.c altera_free_domains(msi);
msi 218 drivers/pci/controller/pcie-altera-msi.c struct altera_msi *msi;
msi 223 drivers/pci/controller/pcie-altera-msi.c msi = devm_kzalloc(&pdev->dev, sizeof(struct altera_msi),
msi 225 drivers/pci/controller/pcie-altera-msi.c if (!msi)
msi 228 drivers/pci/controller/pcie-altera-msi.c mutex_init(&msi->lock);
msi 229 drivers/pci/controller/pcie-altera-msi.c msi->pdev = pdev;
msi 232 drivers/pci/controller/pcie-altera-msi.c msi->csr_base = devm_ioremap_resource(&pdev->dev, res);
msi 233 drivers/pci/controller/pcie-altera-msi.c if (IS_ERR(msi->csr_base)) {
msi 235 drivers/pci/controller/pcie-altera-msi.c return PTR_ERR(msi->csr_base);
msi 240 drivers/pci/controller/pcie-altera-msi.c msi->vector_base = devm_ioremap_resource(&pdev->dev, res);
msi 241 drivers/pci/controller/pcie-altera-msi.c if (IS_ERR(msi->vector_base)) {
msi 243 drivers/pci/controller/pcie-altera-msi.c return PTR_ERR(msi->vector_base);
msi 246 drivers/pci/controller/pcie-altera-msi.c msi->vector_phy = res->start;
msi 248 drivers/pci/controller/pcie-altera-msi.c if (of_property_read_u32(np, "num-vectors", &msi->num_of_vectors)) {
msi 253 drivers/pci/controller/pcie-altera-msi.c ret = altera_allocate_domains(msi);
msi 257 drivers/pci/controller/pcie-altera-msi.c msi->irq = platform_get_irq(pdev, 0);
msi 258 drivers/pci/controller/pcie-altera-msi.c if (msi->irq < 0) {
msi 259 drivers/pci/controller/pcie-altera-msi.c dev_err(&pdev->dev, "failed to map IRQ: %d\n", msi->irq);
msi 260 drivers/pci/controller/pcie-altera-msi.c ret = msi->irq;
msi 264 drivers/pci/controller/pcie-altera-msi.c irq_set_chained_handler_and_data(msi->irq, altera_msi_isr, msi);
msi 265 drivers/pci/controller/pcie-altera-msi.c platform_set_drvdata(pdev, msi);
msi 62 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi;
msi 128 drivers/pci/controller/pcie-iproc-msi.c static inline u32 iproc_msi_read_reg(struct iproc_msi *msi,
msi 132 drivers/pci/controller/pcie-iproc-msi.c struct iproc_pcie *pcie = msi->pcie;
msi 134 drivers/pci/controller/pcie-iproc-msi.c return readl_relaxed(pcie->base + msi->reg_offsets[eq][reg]);
msi 137 drivers/pci/controller/pcie-iproc-msi.c static inline void iproc_msi_write_reg(struct iproc_msi *msi,
msi 141 drivers/pci/controller/pcie-iproc-msi.c struct iproc_pcie *pcie = msi->pcie;
msi 143 drivers/pci/controller/pcie-iproc-msi.c writel_relaxed(val, pcie->base + msi->reg_offsets[eq][reg]);
msi 146 drivers/pci/controller/pcie-iproc-msi.c static inline u32 hwirq_to_group(struct iproc_msi *msi, unsigned long hwirq)
msi 148 drivers/pci/controller/pcie-iproc-msi.c return (hwirq % msi->nr_irqs);
msi 151 drivers/pci/controller/pcie-iproc-msi.c static inline unsigned int iproc_msi_addr_offset(struct iproc_msi *msi,
msi 154 drivers/pci/controller/pcie-iproc-msi.c if (msi->nr_msi_region > 1)
msi 155 drivers/pci/controller/pcie-iproc-msi.c return hwirq_to_group(msi, hwirq) * MSI_MEM_REGION_SIZE;
msi 157 drivers/pci/controller/pcie-iproc-msi.c return hwirq_to_group(msi, hwirq) * sizeof(u32);
msi 160 drivers/pci/controller/pcie-iproc-msi.c static inline unsigned int iproc_msi_eq_offset(struct iproc_msi *msi, u32 eq)
msi 162 drivers/pci/controller/pcie-iproc-msi.c if (msi->nr_eq_region > 1)
msi 195 drivers/pci/controller/pcie-iproc-msi.c static inline int hwirq_to_cpu(struct iproc_msi *msi, unsigned long hwirq)
msi 197 drivers/pci/controller/pcie-iproc-msi.c return (hwirq % msi->nr_cpus);
msi 200 drivers/pci/controller/pcie-iproc-msi.c static inline unsigned long hwirq_to_canonical_hwirq(struct iproc_msi *msi,
msi 203 drivers/pci/controller/pcie-iproc-msi.c return (hwirq - hwirq_to_cpu(msi, hwirq));
msi 209 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi = irq_data_get_irq_chip_data(data);
msi 213 drivers/pci/controller/pcie-iproc-msi.c curr_cpu = hwirq_to_cpu(msi, data->hwirq);
msi 218 drivers/pci/controller/pcie-iproc-msi.c data->hwirq = hwirq_to_canonical_hwirq(msi, data->hwirq) + target_cpu;
msi 226 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi = irq_data_get_irq_chip_data(data);
msi 229 drivers/pci/controller/pcie-iproc-msi.c addr = msi->msi_addr + iproc_msi_addr_offset(msi, data->hwirq);
msi 245 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi = domain->host_data;
msi 248 drivers/pci/controller/pcie-iproc-msi.c mutex_lock(&msi->bitmap_lock);
msi 251 drivers/pci/controller/pcie-iproc-msi.c hwirq = bitmap_find_next_zero_area(msi->bitmap, msi->nr_msi_vecs, 0,
msi 252 drivers/pci/controller/pcie-iproc-msi.c msi->nr_cpus, 0);
msi 253 drivers/pci/controller/pcie-iproc-msi.c if (hwirq < msi->nr_msi_vecs) {
msi 254 drivers/pci/controller/pcie-iproc-msi.c bitmap_set(msi->bitmap, hwirq, msi->nr_cpus);
msi 256 drivers/pci/controller/pcie-iproc-msi.c mutex_unlock(&msi->bitmap_lock);
msi 260 drivers/pci/controller/pcie-iproc-msi.c mutex_unlock(&msi->bitmap_lock);
msi 276 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi = irq_data_get_irq_chip_data(data);
msi 279 drivers/pci/controller/pcie-iproc-msi.c mutex_lock(&msi->bitmap_lock);
msi 281 drivers/pci/controller/pcie-iproc-msi.c hwirq = hwirq_to_canonical_hwirq(msi, data->hwirq);
msi 282 drivers/pci/controller/pcie-iproc-msi.c bitmap_clear(msi->bitmap, hwirq, msi->nr_cpus);
msi 284 drivers/pci/controller/pcie-iproc-msi.c mutex_unlock(&msi->bitmap_lock);
msi 294 drivers/pci/controller/pcie-iproc-msi.c static inline u32 decode_msi_hwirq(struct iproc_msi *msi, u32 eq, u32 head)
msi 299 drivers/pci/controller/pcie-iproc-msi.c offs = iproc_msi_eq_offset(msi, eq) + head * sizeof(u32);
msi 300 drivers/pci/controller/pcie-iproc-msi.c msg = (u32 *)(msi->eq_cpu + offs);
msi 309 drivers/pci/controller/pcie-iproc-msi.c return hwirq_to_canonical_hwirq(msi, hwirq);
msi 316 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi;
msi 324 drivers/pci/controller/pcie-iproc-msi.c msi = grp->msi;
msi 337 drivers/pci/controller/pcie-iproc-msi.c head = iproc_msi_read_reg(msi, IPROC_MSI_EQ_HEAD,
msi 340 drivers/pci/controller/pcie-iproc-msi.c tail = iproc_msi_read_reg(msi, IPROC_MSI_EQ_TAIL,
msi 354 drivers/pci/controller/pcie-iproc-msi.c hwirq = decode_msi_hwirq(msi, eq, head);
msi 355 drivers/pci/controller/pcie-iproc-msi.c virq = irq_find_mapping(msi->inner_domain, hwirq);
msi 366 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_EQ_HEAD, eq, head);
msi 377 drivers/pci/controller/pcie-iproc-msi.c static void iproc_msi_enable(struct iproc_msi *msi)
msi 383 drivers/pci/controller/pcie-iproc-msi.c for (i = 0; i < msi->nr_eq_region; i++) {
msi 384 drivers/pci/controller/pcie-iproc-msi.c dma_addr_t addr = msi->eq_dma + (i * EQ_MEM_REGION_SIZE);
msi 386 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_EQ_PAGE, i,
msi 388 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_EQ_PAGE_UPPER, i,
msi 393 drivers/pci/controller/pcie-iproc-msi.c for (i = 0; i < msi->nr_msi_region; i++) {
msi 394 drivers/pci/controller/pcie-iproc-msi.c phys_addr_t addr = msi->msi_addr + (i * MSI_MEM_REGION_SIZE);
msi 396 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_PAGE, i,
msi 398 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_PAGE_UPPER, i,
msi 402 drivers/pci/controller/pcie-iproc-msi.c for (eq = 0; eq < msi->nr_irqs; eq++) {
msi 406 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_CTRL, eq, val);
msi 412 drivers/pci/controller/pcie-iproc-msi.c if (msi->has_inten_reg) {
msi 413 drivers/pci/controller/pcie-iproc-msi.c val = iproc_msi_read_reg(msi, IPROC_MSI_INTS_EN, eq);
msi 415 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_INTS_EN, eq, val);
msi 420 drivers/pci/controller/pcie-iproc-msi.c static void iproc_msi_disable(struct iproc_msi *msi)
msi 424 drivers/pci/controller/pcie-iproc-msi.c for (eq = 0; eq < msi->nr_irqs; eq++) {
msi 425 drivers/pci/controller/pcie-iproc-msi.c if (msi->has_inten_reg) {
msi 426 drivers/pci/controller/pcie-iproc-msi.c val = iproc_msi_read_reg(msi, IPROC_MSI_INTS_EN, eq);
msi 428 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_INTS_EN, eq, val);
msi 431 drivers/pci/controller/pcie-iproc-msi.c val = iproc_msi_read_reg(msi, IPROC_MSI_CTRL, eq);
msi 434 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_write_reg(msi, IPROC_MSI_CTRL, eq, val);
msi 439 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi)
msi 441 drivers/pci/controller/pcie-iproc-msi.c msi->inner_domain = irq_domain_add_linear(NULL, msi->nr_msi_vecs,
msi 442 drivers/pci/controller/pcie-iproc-msi.c &msi_domain_ops, msi);
msi 443 drivers/pci/controller/pcie-iproc-msi.c if (!msi->inner_domain)
msi 446 drivers/pci/controller/pcie-iproc-msi.c msi->msi_domain = pci_msi_create_irq_domain(of_node_to_fwnode(node),
msi 448 drivers/pci/controller/pcie-iproc-msi.c msi->inner_domain);
msi 449 drivers/pci/controller/pcie-iproc-msi.c if (!msi->msi_domain) {
msi 450 drivers/pci/controller/pcie-iproc-msi.c irq_domain_remove(msi->inner_domain);
msi 457 drivers/pci/controller/pcie-iproc-msi.c static void iproc_msi_free_domains(struct iproc_msi *msi)
msi 459 drivers/pci/controller/pcie-iproc-msi.c if (msi->msi_domain)
msi 460 drivers/pci/controller/pcie-iproc-msi.c irq_domain_remove(msi->msi_domain);
msi 462 drivers/pci/controller/pcie-iproc-msi.c if (msi->inner_domain)
msi 463 drivers/pci/controller/pcie-iproc-msi.c irq_domain_remove(msi->inner_domain);
msi 466 drivers/pci/controller/pcie-iproc-msi.c static void iproc_msi_irq_free(struct iproc_msi *msi, unsigned int cpu)
msi 470 drivers/pci/controller/pcie-iproc-msi.c for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) {
msi 471 drivers/pci/controller/pcie-iproc-msi.c irq_set_chained_handler_and_data(msi->grps[i].gic_irq,
msi 476 drivers/pci/controller/pcie-iproc-msi.c static int iproc_msi_irq_setup(struct iproc_msi *msi, unsigned int cpu)
msi 480 drivers/pci/controller/pcie-iproc-msi.c struct iproc_pcie *pcie = msi->pcie;
msi 482 drivers/pci/controller/pcie-iproc-msi.c for (i = cpu; i < msi->nr_irqs; i += msi->nr_cpus) {
msi 483 drivers/pci/controller/pcie-iproc-msi.c irq_set_chained_handler_and_data(msi->grps[i].gic_irq,
msi 485 drivers/pci/controller/pcie-iproc-msi.c &msi->grps[i]);
msi 490 drivers/pci/controller/pcie-iproc-msi.c ret = irq_set_affinity(msi->grps[i].gic_irq, mask);
msi 494 drivers/pci/controller/pcie-iproc-msi.c msi->grps[i].gic_irq);
msi 503 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_irq_free(msi, cpu);
msi 513 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi;
msi 523 drivers/pci/controller/pcie-iproc-msi.c if (pcie->msi)
msi 526 drivers/pci/controller/pcie-iproc-msi.c msi = devm_kzalloc(pcie->dev, sizeof(*msi), GFP_KERNEL);
msi 527 drivers/pci/controller/pcie-iproc-msi.c if (!msi)
msi 530 drivers/pci/controller/pcie-iproc-msi.c msi->pcie = pcie;
msi 531 drivers/pci/controller/pcie-iproc-msi.c pcie->msi = msi;
msi 532 drivers/pci/controller/pcie-iproc-msi.c msi->msi_addr = pcie->base_addr;
msi 533 drivers/pci/controller/pcie-iproc-msi.c mutex_init(&msi->bitmap_lock);
msi 534 drivers/pci/controller/pcie-iproc-msi.c msi->nr_cpus = num_possible_cpus();
msi 536 drivers/pci/controller/pcie-iproc-msi.c msi->nr_irqs = of_irq_count(node);
msi 537 drivers/pci/controller/pcie-iproc-msi.c if (!msi->nr_irqs) {
msi 542 drivers/pci/controller/pcie-iproc-msi.c if (msi->nr_irqs > NR_HW_IRQS) {
msi 544 drivers/pci/controller/pcie-iproc-msi.c msi->nr_irqs);
msi 545 drivers/pci/controller/pcie-iproc-msi.c msi->nr_irqs = NR_HW_IRQS;
msi 548 drivers/pci/controller/pcie-iproc-msi.c if (msi->nr_irqs < msi->nr_cpus) {
msi 554 drivers/pci/controller/pcie-iproc-msi.c if (msi->nr_irqs % msi->nr_cpus != 0) {
msi 555 drivers/pci/controller/pcie-iproc-msi.c msi->nr_irqs -= msi->nr_irqs % msi->nr_cpus;
msi 557 drivers/pci/controller/pcie-iproc-msi.c msi->nr_irqs);
msi 563 drivers/pci/controller/pcie-iproc-msi.c msi->reg_offsets = iproc_msi_reg_paxb;
msi 564 drivers/pci/controller/pcie-iproc-msi.c msi->nr_eq_region = 1;
msi 565 drivers/pci/controller/pcie-iproc-msi.c msi->nr_msi_region = 1;
msi 568 drivers/pci/controller/pcie-iproc-msi.c msi->reg_offsets = iproc_msi_reg_paxc;
msi 569 drivers/pci/controller/pcie-iproc-msi.c msi->nr_eq_region = msi->nr_irqs;
msi 570 drivers/pci/controller/pcie-iproc-msi.c msi->nr_msi_region = msi->nr_irqs;
msi 578 drivers/pci/controller/pcie-iproc-msi.c msi->has_inten_reg = true;
msi 580 drivers/pci/controller/pcie-iproc-msi.c msi->nr_msi_vecs = msi->nr_irqs * EQ_LEN;
msi 581 drivers/pci/controller/pcie-iproc-msi.c msi->bitmap = devm_kcalloc(pcie->dev, BITS_TO_LONGS(msi->nr_msi_vecs),
msi 582 drivers/pci/controller/pcie-iproc-msi.c sizeof(*msi->bitmap), GFP_KERNEL);
msi 583 drivers/pci/controller/pcie-iproc-msi.c if (!msi->bitmap)
msi 586 drivers/pci/controller/pcie-iproc-msi.c msi->grps = devm_kcalloc(pcie->dev, msi->nr_irqs, sizeof(*msi->grps),
msi 588 drivers/pci/controller/pcie-iproc-msi.c if (!msi->grps)
msi 591 drivers/pci/controller/pcie-iproc-msi.c for (i = 0; i < msi->nr_irqs; i++) {
msi 599 drivers/pci/controller/pcie-iproc-msi.c msi->grps[i].gic_irq = irq;
msi 600 drivers/pci/controller/pcie-iproc-msi.c msi->grps[i].msi = msi;
msi 601 drivers/pci/controller/pcie-iproc-msi.c msi->grps[i].eq = i;
msi 605 drivers/pci/controller/pcie-iproc-msi.c msi->eq_cpu = dma_alloc_coherent(pcie->dev,
msi 606 drivers/pci/controller/pcie-iproc-msi.c msi->nr_eq_region * EQ_MEM_REGION_SIZE,
msi 607 drivers/pci/controller/pcie-iproc-msi.c &msi->eq_dma, GFP_KERNEL);
msi 608 drivers/pci/controller/pcie-iproc-msi.c if (!msi->eq_cpu) {
msi 613 drivers/pci/controller/pcie-iproc-msi.c ret = iproc_msi_alloc_domains(node, msi);
msi 620 drivers/pci/controller/pcie-iproc-msi.c ret = iproc_msi_irq_setup(msi, cpu);
msi 625 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_enable(msi);
msi 631 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_irq_free(msi, cpu);
msi 632 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_free_domains(msi);
msi 635 drivers/pci/controller/pcie-iproc-msi.c dma_free_coherent(pcie->dev, msi->nr_eq_region * EQ_MEM_REGION_SIZE,
msi 636 drivers/pci/controller/pcie-iproc-msi.c msi->eq_cpu, msi->eq_dma);
msi 639 drivers/pci/controller/pcie-iproc-msi.c for (i = 0; i < msi->nr_irqs; i++) {
msi 640 drivers/pci/controller/pcie-iproc-msi.c if (msi->grps[i].gic_irq)
msi 641 drivers/pci/controller/pcie-iproc-msi.c irq_dispose_mapping(msi->grps[i].gic_irq);
msi 643 drivers/pci/controller/pcie-iproc-msi.c pcie->msi = NULL;
msi 650 drivers/pci/controller/pcie-iproc-msi.c struct iproc_msi *msi = pcie->msi;
msi 653 drivers/pci/controller/pcie-iproc-msi.c if (!msi)
msi 656 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_disable(msi);
msi 659 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_irq_free(msi, cpu);
msi 661 drivers/pci/controller/pcie-iproc-msi.c iproc_msi_free_domains(msi);
msi 663 drivers/pci/controller/pcie-iproc-msi.c dma_free_coherent(pcie->dev, msi->nr_eq_region * EQ_MEM_REGION_SIZE,
msi 664 drivers/pci/controller/pcie-iproc-msi.c msi->eq_cpu, msi->eq_dma);
msi 666 drivers/pci/controller/pcie-iproc-msi.c for (i = 0; i < msi->nr_irqs; i++) {
msi 667 drivers/pci/controller/pcie-iproc-msi.c if (msi->grps[i].gic_irq)
msi 668 drivers/pci/controller/pcie-iproc-msi.c irq_dispose_mapping(msi->grps[i].gic_irq);
msi 106 drivers/pci/controller/pcie-iproc.h struct iproc_msi *msi;
msi 157 drivers/pci/controller/pcie-mobiveil.c struct mobiveil_msi msi;
msi 343 drivers/pci/controller/pcie-mobiveil.c struct mobiveil_msi *msi = &pcie->msi;
msi 410 drivers/pci/controller/pcie-mobiveil.c virq = irq_find_mapping(msi->dev_domain, msi_data);
msi 568 drivers/pci/controller/pcie-mobiveil.c struct mobiveil_msi *msi = &pcie->msi;
msi 570 drivers/pci/controller/pcie-mobiveil.c pcie->msi.num_of_vectors = PCI_NUM_MSI;
msi 571 drivers/pci/controller/pcie-mobiveil.c msi->msi_pages_phys = (phys_addr_t)msg_addr;
msi 763 drivers/pci/controller/pcie-mobiveil.c struct mobiveil_msi *msi = &pcie->msi;
msi 767 drivers/pci/controller/pcie-mobiveil.c mutex_lock(&msi->lock);
msi 769 drivers/pci/controller/pcie-mobiveil.c bit = find_first_zero_bit(msi->msi_irq_in_use, msi->num_of_vectors);
msi 770 drivers/pci/controller/pcie-mobiveil.c if (bit >= msi->num_of_vectors) {
msi 771 drivers/pci/controller/pcie-mobiveil.c mutex_unlock(&msi->lock);
msi 775 drivers/pci/controller/pcie-mobiveil.c set_bit(bit, msi->msi_irq_in_use);
msi 777 drivers/pci/controller/pcie-mobiveil.c mutex_unlock(&msi->lock);
msi 790 drivers/pci/controller/pcie-mobiveil.c struct mobiveil_msi *msi = &pcie->msi;
msi 792 drivers/pci/controller/pcie-mobiveil.c mutex_lock(&msi->lock);
msi 794 drivers/pci/controller/pcie-mobiveil.c if (!test_bit(d->hwirq, msi->msi_irq_in_use))
msi 798 drivers/pci/controller/pcie-mobiveil.c __clear_bit(d->hwirq, msi->msi_irq_in_use);
msi 800 drivers/pci/controller/pcie-mobiveil.c mutex_unlock(&msi->lock);
msi 811 drivers/pci/controller/pcie-mobiveil.c struct mobiveil_msi *msi = &pcie->msi;
msi 813 drivers/pci/controller/pcie-mobiveil.c mutex_init(&pcie->msi.lock);
msi 814 drivers/pci/controller/pcie-mobiveil.c msi->dev_domain = irq_domain_add_linear(NULL, msi->num_of_vectors,
msi 816 drivers/pci/controller/pcie-mobiveil.c if (!msi->dev_domain) {
msi 821 drivers/pci/controller/pcie-mobiveil.c msi->msi_domain = pci_msi_create_irq_domain(fwnode,
msi 823 drivers/pci/controller/pcie-mobiveil.c msi->dev_domain);
msi 824 drivers/pci/controller/pcie-mobiveil.c if (!msi->msi_domain) {
msi 826 drivers/pci/controller/pcie-mobiveil.c irq_domain_remove(msi->dev_domain);
msi 157 drivers/pci/controller/pcie-rcar.c struct rcar_msi msi;
msi 478 drivers/pci/controller/pcie-rcar.c bridge->msi = &pcie->msi.chip;
msi 699 drivers/pci/controller/pcie-rcar.c int msi;
msi 703 drivers/pci/controller/pcie-rcar.c msi = find_first_zero_bit(chip->used, INT_PCI_MSI_NR);
msi 704 drivers/pci/controller/pcie-rcar.c if (msi < INT_PCI_MSI_NR)
msi 705 drivers/pci/controller/pcie-rcar.c set_bit(msi, chip->used);
msi 707 drivers/pci/controller/pcie-rcar.c msi = -ENOSPC;
msi 711 drivers/pci/controller/pcie-rcar.c return msi;
msi 716 drivers/pci/controller/pcie-rcar.c int msi;
msi 719 drivers/pci/controller/pcie-rcar.c msi = bitmap_find_free_region(chip->used, INT_PCI_MSI_NR,
msi 723 drivers/pci/controller/pcie-rcar.c return msi;
msi 736 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = &pcie->msi;
msi 753 drivers/pci/controller/pcie-rcar.c msi_irq = irq_find_mapping(msi->domain, index);
msi 755 drivers/pci/controller/pcie-rcar.c if (test_bit(index, msi->used))
msi 774 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = to_rcar_msi(chip);
msi 775 drivers/pci/controller/pcie-rcar.c struct rcar_pcie *pcie = container_of(chip, struct rcar_pcie, msi.chip);
msi 780 drivers/pci/controller/pcie-rcar.c hwirq = rcar_msi_alloc(msi);
msi 784 drivers/pci/controller/pcie-rcar.c irq = irq_find_mapping(msi->domain, hwirq);
msi 786 drivers/pci/controller/pcie-rcar.c rcar_msi_free(msi, hwirq);
msi 804 drivers/pci/controller/pcie-rcar.c struct rcar_pcie *pcie = container_of(chip, struct rcar_pcie, msi.chip);
msi 805 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = to_rcar_msi(chip);
msi 819 drivers/pci/controller/pcie-rcar.c hwirq = rcar_msi_alloc_region(msi, nvec);
msi 823 drivers/pci/controller/pcie-rcar.c irq = irq_find_mapping(msi->domain, hwirq);
msi 855 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = to_rcar_msi(chip);
msi 858 drivers/pci/controller/pcie-rcar.c rcar_msi_free(msi, d->hwirq);
msi 884 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = &pcie->msi;
msi 888 drivers/pci/controller/pcie-rcar.c irq = irq_find_mapping(msi->domain, i);
msi 893 drivers/pci/controller/pcie-rcar.c irq_domain_remove(msi->domain);
msi 899 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = &pcie->msi;
msi 903 drivers/pci/controller/pcie-rcar.c mutex_init(&msi->lock);
msi 905 drivers/pci/controller/pcie-rcar.c msi->chip.dev = dev;
msi 906 drivers/pci/controller/pcie-rcar.c msi->chip.setup_irq = rcar_msi_setup_irq;
msi 907 drivers/pci/controller/pcie-rcar.c msi->chip.setup_irqs = rcar_msi_setup_irqs;
msi 908 drivers/pci/controller/pcie-rcar.c msi->chip.teardown_irq = rcar_msi_teardown_irq;
msi 910 drivers/pci/controller/pcie-rcar.c msi->domain = irq_domain_add_linear(dev->of_node, INT_PCI_MSI_NR,
msi 911 drivers/pci/controller/pcie-rcar.c &msi_domain_ops, &msi->chip);
msi 912 drivers/pci/controller/pcie-rcar.c if (!msi->domain) {
msi 918 drivers/pci/controller/pcie-rcar.c irq_create_mapping(msi->domain, i);
msi 921 drivers/pci/controller/pcie-rcar.c err = devm_request_irq(dev, msi->irq1, rcar_pcie_msi_irq,
msi 929 drivers/pci/controller/pcie-rcar.c err = devm_request_irq(dev, msi->irq2, rcar_pcie_msi_irq,
msi 938 drivers/pci/controller/pcie-rcar.c msi->pages = __get_free_pages(GFP_KERNEL, 0);
msi 939 drivers/pci/controller/pcie-rcar.c if (!msi->pages) {
msi 943 drivers/pci/controller/pcie-rcar.c base = virt_to_phys((void *)msi->pages);
msi 960 drivers/pci/controller/pcie-rcar.c struct rcar_msi *msi = &pcie->msi;
msi 968 drivers/pci/controller/pcie-rcar.c free_pages(msi->pages, 0);
msi 1003 drivers/pci/controller/pcie-rcar.c pcie->msi.irq1 = i;
msi 1011 drivers/pci/controller/pcie-rcar.c pcie->msi.irq2 = i;
msi 1016 drivers/pci/controller/pcie-rcar.c irq_dispose_mapping(pcie->msi.irq1);
msi 1220 drivers/pci/controller/pcie-rcar.c irq_dispose_mapping(pcie->msi.irq2);
msi 1221 drivers/pci/controller/pcie-rcar.c irq_dispose_mapping(pcie->msi.irq1);
msi 170 drivers/pci/controller/pcie-xilinx-nwl.c struct nwl_msi msi;
msi 346 drivers/pci/controller/pcie-xilinx-nwl.c struct nwl_msi *msi;
msi 351 drivers/pci/controller/pcie-xilinx-nwl.c msi = &pcie->msi;
msi 356 drivers/pci/controller/pcie-xilinx-nwl.c virq = irq_find_mapping(msi->dev_domain, bit);
msi 480 drivers/pci/controller/pcie-xilinx-nwl.c struct nwl_msi *msi = &pcie->msi;
msi 484 drivers/pci/controller/pcie-xilinx-nwl.c mutex_lock(&msi->lock);
msi 485 drivers/pci/controller/pcie-xilinx-nwl.c bit = bitmap_find_free_region(msi->bitmap, INT_PCI_MSI_NR,
msi 488 drivers/pci/controller/pcie-xilinx-nwl.c mutex_unlock(&msi->lock);
msi 497 drivers/pci/controller/pcie-xilinx-nwl.c mutex_unlock(&msi->lock);
msi 506 drivers/pci/controller/pcie-xilinx-nwl.c struct nwl_msi *msi = &pcie->msi;
msi 508 drivers/pci/controller/pcie-xilinx-nwl.c mutex_lock(&msi->lock);
msi 509 drivers/pci/controller/pcie-xilinx-nwl.c bitmap_release_region(msi->bitmap, data->hwirq,
msi 511 drivers/pci/controller/pcie-xilinx-nwl.c mutex_unlock(&msi->lock);
msi 524 drivers/pci/controller/pcie-xilinx-nwl.c struct nwl_msi *msi = &pcie->msi;
msi 526 drivers/pci/controller/pcie-xilinx-nwl.c msi->dev_domain = irq_domain_add_linear(NULL, INT_PCI_MSI_NR,
msi 528 drivers/pci/controller/pcie-xilinx-nwl.c if (!msi->dev_domain) {
msi 532 drivers/pci/controller/pcie-xilinx-nwl.c msi->msi_domain = pci_msi_create_irq_domain(fwnode,
534 drivers/pci/controller/pcie-xilinx-nwl.c msi->dev_domain); msi 535 drivers/pci/controller/pcie-xilinx-nwl.c if (!msi->msi_domain) { msi 537 drivers/pci/controller/pcie-xilinx-nwl.c irq_domain_remove(msi->dev_domain); msi 575 drivers/pci/controller/pcie-xilinx-nwl.c struct nwl_msi *msi = &pcie->msi; msi 580 drivers/pci/controller/pcie-xilinx-nwl.c mutex_init(&msi->lock); msi 582 drivers/pci/controller/pcie-xilinx-nwl.c msi->bitmap = kzalloc(size, GFP_KERNEL); msi 583 drivers/pci/controller/pcie-xilinx-nwl.c if (!msi->bitmap) msi 587 drivers/pci/controller/pcie-xilinx-nwl.c msi->irq_msi1 = platform_get_irq_byname(pdev, "msi1"); msi 588 drivers/pci/controller/pcie-xilinx-nwl.c if (msi->irq_msi1 < 0) { msi 589 drivers/pci/controller/pcie-xilinx-nwl.c dev_err(dev, "failed to get IRQ#%d\n", msi->irq_msi1); msi 594 drivers/pci/controller/pcie-xilinx-nwl.c irq_set_chained_handler_and_data(msi->irq_msi1, msi 598 drivers/pci/controller/pcie-xilinx-nwl.c msi->irq_msi0 = platform_get_irq_byname(pdev, "msi0"); msi 599 drivers/pci/controller/pcie-xilinx-nwl.c if (msi->irq_msi0 < 0) { msi 600 drivers/pci/controller/pcie-xilinx-nwl.c dev_err(dev, "failed to get IRQ#%d\n", msi->irq_msi0); msi 605 drivers/pci/controller/pcie-xilinx-nwl.c irq_set_chained_handler_and_data(msi->irq_msi0, msi 653 drivers/pci/controller/pcie-xilinx-nwl.c kfree(msi->bitmap); msi 654 drivers/pci/controller/pcie-xilinx-nwl.c msi->bitmap = NULL; msi 215 drivers/pci/controller/pcie-xilinx.c struct msi_desc *msi; msi 221 drivers/pci/controller/pcie-xilinx.c msi = irq_get_msi_desc(irq); msi 222 drivers/pci/controller/pcie-xilinx.c port = msi_desc_to_pci_sysdata(msi); msi 672 drivers/pci/controller/pcie-xilinx.c bridge->msi = &xilinx_pcie_msi_chip; msi 65 drivers/pci/msi.c struct msi_controller *chip = dev->bus->msi; msi 92 drivers/pci/msi.c struct msi_controller *chip = dev->bus->msi; msi 847 drivers/pci/probe.c bus->msi = bridge->msi; msi 995 drivers/pci/probe.c child->msi = parent->msi; msi 269 drivers/scsi/aacraid/aachba.c module_param_named(msi, aac_msi, int, S_IRUGO|S_IWUSR); msi 270 drivers/scsi/aacraid/aachba.c MODULE_PARM_DESC(msi, "IRQ handling." 
msi 1655 drivers/scsi/aacraid/aacraid.h u8 msi;
msi 2563 drivers/scsi/aacraid/commsup.c if (dev->msi)
msi 2588 drivers/scsi/aacraid/commsup.c if (dev->msi)
msi 1276 drivers/scsi/aacraid/linit.c if (dev->msi)
msi 1575 drivers/scsi/aacraid/linit.c if (aac->msi)
msi 637 drivers/scsi/aacraid/rx.c dev->msi = aac_msi && !pci_enable_msi(dev->pdev);
msi 640 drivers/scsi/aacraid/rx.c if (dev->msi)
msi 970 drivers/scsi/aacraid/src.c dev->msi = !pci_enable_msi(dev->pdev);
msi 978 drivers/scsi/aacraid/src.c if (dev->msi)
msi 500 drivers/scsi/cxlflash/main.c cmd->rcb.msi = SISL_MSI_RRQ_UPDATED;
msi 628 drivers/scsi/cxlflash/main.c cmd->rcb.msi = SISL_MSI_RRQ_UPDATED;
msi 2408 drivers/scsi/cxlflash/main.c rcb.msi = SISL_MSI_RRQ_UPDATED;
msi 3365 drivers/scsi/cxlflash/main.c rcb.msi = SISL_MSI_RRQ_UPDATED;
msi 3450 drivers/scsi/cxlflash/main.c rcb.msi = SISL_MSI_RRQ_UPDATED;
msi 61 drivers/scsi/cxlflash/sislite.h u8 msi; /* LISN to send on RRQ write */
msi 367 drivers/scsi/stex.c static int msi;
msi 368 drivers/scsi/stex.c module_param(msi, int, 0);
msi 369 drivers/scsi/stex.c MODULE_PARM_DESC(msi, "Enable Message Signaled Interrupts(0=off, 1=on)");
msi 1621 drivers/scsi/stex.c if (msi || hba->cardtype == st_P3) {
msi 3335 drivers/staging/qlge/qlge_main.c goto msi;
msi 3358 drivers/staging/qlge/qlge_main.c msi:
msi 381 include/kvm/arm_vgic.h int kvm_send_userspace_msi(struct kvm *kvm, struct kvm_msi *msi);
msi 395 include/linux/kvm_host.h } msi;
msi 1154 include/linux/kvm_host.h int kvm_send_userspace_msi(struct kvm *kvm, struct kvm_msi *msi);
msi 433 include/linux/ntb.h struct ntb_msi *msi;
msi 504 include/linux/pci.h struct msi_controller *msi;
msi 583 include/linux/pci.h struct msi_controller *msi; /* MSI controller */
msi 1047 include/uapi/linux/kvm.h struct kvm_irq_routing_msi msi;
msi 854 sound/pci/hda/hda_controller.c if (chip->msi) {
msi 139 sound/pci/hda/hda_controller.h unsigned int msi:1;
msi 784 sound/pci/hda/hda_intel.c chip->msi ? 0 : IRQF_SHARED,
msi 794 sound/pci/hda/hda_intel.c pci_intx(chip->pci, !chip->msi);
msi 1034 sound/pci/hda/hda_intel.c if (chip->msi)
msi 1050 sound/pci/hda/hda_intel.c if (chip->msi)
msi 1052 sound/pci/hda/hda_intel.c chip->msi = 0;
msi 1370 sound/pci/hda/hda_intel.c if (chip->msi)
msi 1614 sound/pci/hda/hda_intel.c chip->msi = !!enable_msi;
msi 1617 sound/pci/hda/hda_intel.c chip->msi = 1; /* enable MSI as default */
msi 1623 sound/pci/hda/hda_intel.c chip->msi = q->value;
msi 1630 sound/pci/hda/hda_intel.c chip->msi = 0;
msi 1842 sound/pci/hda/hda_intel.c if (chip->msi) {
msi 1848 sound/pci/hda/hda_intel.c chip->msi = 0;
msi 2003 sound/pci/hda/hda_intel.c chip->msi = 0;
msi 1047 tools/include/uapi/linux/kvm.h struct kvm_irq_routing_msi msi;
msi 55 virt/kvm/arm/vgic/vgic-irqfd.c e->msi.address_lo = ue->u.msi.address_lo;
msi 56 virt/kvm/arm/vgic/vgic-irqfd.c e->msi.address_hi = ue->u.msi.address_hi;
msi 57 virt/kvm/arm/vgic/vgic-irqfd.c e->msi.data = ue->u.msi.data;
msi 58 virt/kvm/arm/vgic/vgic-irqfd.c e->msi.flags = ue->flags;
msi 59 virt/kvm/arm/vgic/vgic-irqfd.c e->msi.devid = ue->u.msi.devid;
msi 70 virt/kvm/arm/vgic/vgic-irqfd.c struct kvm_msi *msi)
msi 72 virt/kvm/arm/vgic/vgic-irqfd.c msi->address_lo = e->msi.address_lo;
msi 73 virt/kvm/arm/vgic/vgic-irqfd.c msi->address_hi = e->msi.address_hi;
msi 74 virt/kvm/arm/vgic/vgic-irqfd.c msi->data = e->msi.data;
msi 75 virt/kvm/arm/vgic/vgic-irqfd.c msi->flags = e->msi.flags;
msi 76 virt/kvm/arm/vgic/vgic-irqfd.c msi->devid = e->msi.devid;
msi 89 virt/kvm/arm/vgic/vgic-irqfd.c struct kvm_msi msi;
msi 97 virt/kvm/arm/vgic/vgic-irqfd.c kvm_populate_msi(e, &msi);
msi 98 virt/kvm/arm/vgic/vgic-irqfd.c return vgic_its_inject_msi(kvm, &msi);
msi 111 virt/kvm/arm/vgic/vgic-irqfd.c struct kvm_msi msi;
msi 113 virt/kvm/arm/vgic/vgic-irqfd.c kvm_populate_msi(e, &msi);
msi 114 virt/kvm/arm/vgic/vgic-irqfd.c if (!vgic_its_inject_cached_translation(kvm, &msi))
msi 685 virt/kvm/arm/vgic/vgic-its.c struct vgic_its *vgic_msi_to_its(struct kvm *kvm, struct kvm_msi *msi)
msi 694 virt/kvm/arm/vgic/vgic-its.c if (!(msi->flags & KVM_MSI_VALID_DEVID))
msi 697 virt/kvm/arm/vgic/vgic-its.c address = (u64)msi->address_hi << 32 | msi->address_lo;
msi 742 virt/kvm/arm/vgic/vgic-its.c int vgic_its_inject_cached_translation(struct kvm *kvm, struct kvm_msi *msi)
msi 748 virt/kvm/arm/vgic/vgic-its.c db = (u64)msi->address_hi << 32 | msi->address_lo;
msi 749 virt/kvm/arm/vgic/vgic-its.c irq = vgic_its_check_cache(kvm, db, msi->devid, msi->data);
msi 767 virt/kvm/arm/vgic/vgic-its.c int vgic_its_inject_msi(struct kvm *kvm, struct kvm_msi *msi)
msi 772 virt/kvm/arm/vgic/vgic-its.c if (!vgic_its_inject_cached_translation(kvm, msi))
msi 775 virt/kvm/arm/vgic/vgic-its.c its = vgic_msi_to_its(kvm, msi);
msi 780 virt/kvm/arm/vgic/vgic-its.c ret = vgic_its_trigger_msi(kvm, its, msi->devid, msi->data);
msi 237 virt/kvm/arm/vgic/vgic-v4.c struct kvm_msi msi = (struct kvm_msi) {
msi 238 virt/kvm/arm/vgic/vgic-v4.c .address_lo = irq_entry->msi.address_lo,
msi 239 virt/kvm/arm/vgic/vgic-v4.c .address_hi = irq_entry->msi.address_hi,
msi 240 virt/kvm/arm/vgic/vgic-v4.c .data = irq_entry->msi.data,
msi 241 virt/kvm/arm/vgic/vgic-v4.c .flags = irq_entry->msi.flags,
msi 242 virt/kvm/arm/vgic/vgic-v4.c .devid = irq_entry->msi.devid,
msi 245 virt/kvm/arm/vgic/vgic-v4.c return vgic_msi_to_its(kvm, &msi);
msi 270 virt/kvm/arm/vgic/vgic-v4.c ret = vgic_its_resolve_lpi(kvm, its, irq_entry->msi.devid,
msi 271 virt/kvm/arm/vgic/vgic-v4.c irq_entry->msi.data, &irq);
msi 323 virt/kvm/arm/vgic/vgic-v4.c ret = vgic_its_resolve_lpi(kvm, its, irq_entry->msi.devid,
msi 324 virt/kvm/arm/vgic/vgic-v4.c irq_entry->msi.data, &irq);
msi 234 virt/kvm/arm/vgic/vgic.h int vgic_its_inject_msi(struct kvm *kvm, struct kvm_msi *msi);
msi 310 virt/kvm/arm/vgic/vgic.h struct vgic_its *vgic_msi_to_its(struct kvm *kvm, struct kvm_msi *msi);
msi 311 virt/kvm/arm/vgic/vgic.h int vgic_its_inject_cached_translation(struct kvm *kvm, struct kvm_msi *msi);
msi 49 virt/kvm/irqchip.c int kvm_send_userspace_msi(struct kvm *kvm, struct kvm_msi *msi)
msi 53 virt/kvm/irqchip.c if (!irqchip_in_kernel(kvm) || (msi->flags & ~KVM_MSI_VALID_DEVID))
msi 56 virt/kvm/irqchip.c route.msi.address_lo = msi->address_lo;
msi 57 virt/kvm/irqchip.c route.msi.address_hi = msi->address_hi;
msi 58 virt/kvm/irqchip.c route.msi.data = msi->data;
msi 59 virt/kvm/irqchip.c route.msi.flags = msi->flags;
msi 60 virt/kvm/irqchip.c route.msi.devid = msi->devid;
msi 3441 virt/kvm/kvm_main.c struct kvm_msi msi;
msi 3444 virt/kvm/kvm_main.c if (copy_from_user(&msi, argp, sizeof(msi)))
msi 3446 virt/kvm/kvm_main.c r = kvm_send_userspace_msi(kvm, &msi);
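Note: the PCIe controller entries above (pcie-mobiveil.c, pcie-rcar.c, pcie-xilinx-nwl.c) all follow the same allocation pattern: each MSI vector is one bit in a bitmap, and vectors are handed out and returned under a mutex. The following is a minimal, self-contained userspace sketch of that pattern, not kernel code; every name in it (demo_msi, demo_msi_alloc, demo_msi_free, DEMO_NUM_VECTORS) is hypothetical and stands in for the driver-specific equivalents indexed above.

/*
 * Simplified userspace sketch of the mutex-protected bitmap allocation
 * pattern seen in the controller drivers listed above.  Build with:
 *   gcc -pthread demo_msi.c -o demo_msi
 */
#include <pthread.h>
#include <stdio.h>

#define DEMO_NUM_VECTORS 32

struct demo_msi {
	pthread_mutex_t lock;
	unsigned long in_use;          /* one bit per MSI vector */
};

/* Reserve the first free vector, or return -1 if none is left. */
static int demo_msi_alloc(struct demo_msi *msi)
{
	int bit = -1;

	pthread_mutex_lock(&msi->lock);
	for (int i = 0; i < DEMO_NUM_VECTORS; i++) {
		if (!(msi->in_use & (1UL << i))) {
			msi->in_use |= 1UL << i;   /* analogous to set_bit() */
			bit = i;
			break;
		}
	}
	pthread_mutex_unlock(&msi->lock);
	return bit;
}

/* Give a vector back, mirroring the __clear_bit()/bitmap_release_region() paths. */
static void demo_msi_free(struct demo_msi *msi, int bit)
{
	pthread_mutex_lock(&msi->lock);
	msi->in_use &= ~(1UL << bit);
	pthread_mutex_unlock(&msi->lock);
}

int main(void)
{
	struct demo_msi msi = { .lock = PTHREAD_MUTEX_INITIALIZER };
	int hwirq = demo_msi_alloc(&msi);

	printf("allocated hwirq %d\n", hwirq);
	demo_msi_free(&msi, hwirq);
	return 0;
}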