Lines matching refs: devid (uses of devid in the AMD IOMMU initialization code)
97 u16 devid; member
111 u16 devid; member
124 u16 devid; member
235 static inline void update_last_devid(u16 devid) in update_last_devid() argument
237 if (devid > amd_iommu_last_bdf) in update_last_devid()
238 amd_iommu_last_bdf = devid; in update_last_devid()
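update_last_devid() above keeps a running maximum of every device ID (a 16-bit PCI bus/device/function value) found while scanning the ACPI tables; that maximum later bounds the tables indexed by devid. A minimal standalone sketch of the same idea, with illustrative names rather than the kernel's:

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for amd_iommu_last_bdf: the highest 16-bit
 * devid (PCI bus/device/function) seen so far. */
static uint16_t last_bdf;

static void track_last_devid(uint16_t devid)
{
        if (devid > last_bdf)           /* keep the running maximum */
                last_bdf = devid;
}

int main(void)
{
        uint16_t seen[] = { 0x0010, 0x00a0, 0x0008 };

        for (unsigned i = 0; i < sizeof(seen) / sizeof(seen[0]); i++)
                track_last_devid(seen[i]);

        /* last_bdf + 1 bounds the size of every table indexed by devid */
        printf("last bdf seen: 0x%04x\n", last_bdf);
        return 0;
}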
438 update_last_devid(dev->devid); in find_last_devid_from_ivhd()
629 static void set_dev_entry_bit(u16 devid, u8 bit) in set_dev_entry_bit() argument
634 amd_iommu_dev_table[devid].data[i] |= (1UL << _bit); in set_dev_entry_bit()
637 static int get_dev_entry_bit(u16 devid, u8 bit) in get_dev_entry_bit() argument
642 return (amd_iommu_dev_table[devid].data[i] & (1UL << _bit)) >> _bit; in get_dev_entry_bit()
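set_dev_entry_bit()/get_dev_entry_bit() address one flag inside a device-table entry stored as an array of 64-bit words; the listing only shows the final data[i] access, so the index math below (word = bit / 64, offset = bit % 64) is a hedged reconstruction over a local toy table rather than the kernel's amd_iommu_dev_table:

#include <stdint.h>
#include <stdio.h>

/* Toy device-table entry: four 64-bit words (256 bits) addressed by a
 * flat bit number; the layout is assumed for illustration only. */
struct dev_table_entry {
        uint64_t data[4];
};

static struct dev_table_entry dev_table[16];    /* indexed by devid */

static void set_dev_entry_bit(uint16_t devid, uint8_t bit)
{
        int i    = (bit >> 6) & 0x03;   /* which 64-bit word */
        int _bit = bit & 0x3f;          /* bit inside that word */

        dev_table[devid].data[i] |= (1ULL << _bit);
}

static int get_dev_entry_bit(uint16_t devid, uint8_t bit)
{
        int i    = (bit >> 6) & 0x03;
        int _bit = bit & 0x3f;

        return (dev_table[devid].data[i] & (1ULL << _bit)) >> _bit;
}

int main(void)
{
        set_dev_entry_bit(3, 97);       /* arbitrary flag bit for devid 3 */
        printf("bit 97 of devid 3: %d\n", get_dev_entry_bit(3, 97));
        return 0;
}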
646 void amd_iommu_apply_erratum_63(u16 devid) in amd_iommu_apply_erratum_63() argument
650 sysmgt = get_dev_entry_bit(devid, DEV_ENTRY_SYSMGT1) | in amd_iommu_apply_erratum_63()
651 (get_dev_entry_bit(devid, DEV_ENTRY_SYSMGT2) << 1); in amd_iommu_apply_erratum_63()
654 set_dev_entry_bit(devid, DEV_ENTRY_IW); in amd_iommu_apply_erratum_63()
658 static void __init set_iommu_for_device(struct amd_iommu *iommu, u16 devid) in set_iommu_for_device() argument
660 amd_iommu_rlookup_table[devid] = iommu; in set_iommu_for_device()
668 u16 devid, u32 flags, u32 ext_flags) in set_dev_entry_from_acpi() argument
671 set_dev_entry_bit(devid, DEV_ENTRY_INIT_PASS); in set_dev_entry_from_acpi()
673 set_dev_entry_bit(devid, DEV_ENTRY_EINT_PASS); in set_dev_entry_from_acpi()
675 set_dev_entry_bit(devid, DEV_ENTRY_NMI_PASS); in set_dev_entry_from_acpi()
677 set_dev_entry_bit(devid, DEV_ENTRY_SYSMGT1); in set_dev_entry_from_acpi()
679 set_dev_entry_bit(devid, DEV_ENTRY_SYSMGT2); in set_dev_entry_from_acpi()
681 set_dev_entry_bit(devid, DEV_ENTRY_LINT0_PASS); in set_dev_entry_from_acpi()
683 set_dev_entry_bit(devid, DEV_ENTRY_LINT1_PASS); in set_dev_entry_from_acpi()
685 amd_iommu_apply_erratum_63(devid); in set_dev_entry_from_acpi()
687 set_iommu_for_device(iommu, devid); in set_dev_entry_from_acpi()
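set_dev_entry_from_acpi() fans the per-device flags from the ACPI IVHD entry out onto individual device-table bits and then records the owning IOMMU. The sketch below mirrors that flag-to-bit fan-out; the flag masks and bit indices are placeholders chosen for illustration, not values taken from the kernel headers:

#include <stdint.h>
#include <stdio.h>

/* Placeholder IVHD flag masks and device-table bit indices; the real
 * values live in the kernel headers and are not part of the listing. */
#define DEVFLAG_INITPASS 0x01
#define DEVFLAG_EINTPASS 0x02
#define DEVFLAG_NMIPASS  0x04
#define DEVFLAG_LINT0    0x40
#define DEVFLAG_LINT1    0x80

enum { ENTRY_INIT_PASS = 0, ENTRY_EINT_PASS, ENTRY_NMI_PASS,
       ENTRY_LINT0_PASS, ENTRY_LINT1_PASS, ENTRY_BITS };

static uint8_t dev_entry[16][ENTRY_BITS];       /* toy device table */

static void set_dev_entry_bit(uint16_t devid, int bit)
{
        dev_entry[devid][bit] = 1;
}

/* Fan the ACPI per-device flags out onto individual device-table bits,
 * mirroring the structure of set_dev_entry_from_acpi(). */
static void dev_entry_from_flags(uint16_t devid, uint32_t flags)
{
        if (flags & DEVFLAG_INITPASS)
                set_dev_entry_bit(devid, ENTRY_INIT_PASS);
        if (flags & DEVFLAG_EINTPASS)
                set_dev_entry_bit(devid, ENTRY_EINT_PASS);
        if (flags & DEVFLAG_NMIPASS)
                set_dev_entry_bit(devid, ENTRY_NMI_PASS);
        if (flags & DEVFLAG_LINT0)
                set_dev_entry_bit(devid, ENTRY_LINT0_PASS);
        if (flags & DEVFLAG_LINT1)
                set_dev_entry_bit(devid, ENTRY_LINT1_PASS);
}

int main(void)
{
        dev_entry_from_flags(5, DEVFLAG_INITPASS | DEVFLAG_LINT1);
        printf("devid 5: init=%d lint1=%d\n",
               dev_entry[5][ENTRY_INIT_PASS], dev_entry[5][ENTRY_LINT1_PASS]);
        return 0;
}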
690 static int __init add_special_device(u8 type, u8 id, u16 *devid, bool cmd_line) in add_special_device() argument
709 *devid = entry->devid; in add_special_device()
719 entry->devid = *devid; in add_special_device()
734 &early_ioapic_map[i].devid, in add_early_maps()
743 &early_hpet_map[i].devid, in add_early_maps()
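add_special_device() registers the devid that routes an IOAPIC or HPET, returning the devid of an already-known (type, id) pair or remembering a new one; add_early_maps() feeds it from the early command-line maps. A simplified lookup-or-append sketch under those assumptions, with a fixed array standing in for the kernel's lists and without the cmd_line precedence handling:

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Simplified "special device" map entry (IOAPIC/HPET -> devid). */
struct devid_map {
        uint8_t  type;          /* IOAPIC vs. HPET; values illustrative */
        uint8_t  id;            /* IOAPIC id or HPET number */
        uint16_t devid;         /* PCI devid the device is routed through */
        int      used;
};

static struct devid_map special_map[8];

/* If (type, id) is already registered, hand back its devid through
 * *devid; otherwise remember the caller's devid. Returns 0 on success. */
static int add_special_device(uint8_t type, uint8_t id, uint16_t *devid)
{
        for (size_t i = 0; i < sizeof(special_map) / sizeof(special_map[0]); i++) {
                struct devid_map *e = &special_map[i];

                if (e->used && e->type == type && e->id == id) {
                        *devid = e->devid;      /* existing mapping wins */
                        return 0;
                }
                if (!e->used) {
                        e->type  = type;
                        e->id    = id;
                        e->devid = *devid;      /* record a new mapping */
                        e->used  = 1;
                        return 0;
                }
        }
        return -1;                              /* table full */
}

int main(void)
{
        uint16_t devid = 0x00a0;

        add_special_device(1, 4, &devid);       /* first registration */
        devid = 0xffff;
        add_special_device(1, 4, &devid);       /* lookup returns 0x00a0 */
        printf("devid for (type 1, id 4): 0x%04x\n", devid);
        return 0;
}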
756 static void __init set_device_exclusion_range(u16 devid, struct ivmd_header *m) in set_device_exclusion_range() argument
758 struct amd_iommu *iommu = amd_iommu_rlookup_table[devid]; in set_device_exclusion_range()
769 set_dev_entry_bit(devid, DEV_ENTRY_EX); in set_device_exclusion_range()
784 u16 devid = 0, devid_start = 0, devid_to = 0; in init_iommu_from_acpi() local
821 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
822 PCI_SLOT(e->devid), in init_iommu_from_acpi()
823 PCI_FUNC(e->devid), in init_iommu_from_acpi()
826 devid = e->devid; in init_iommu_from_acpi()
827 set_dev_entry_from_acpi(iommu, devid, e->flags, 0); in init_iommu_from_acpi()
833 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
834 PCI_SLOT(e->devid), in init_iommu_from_acpi()
835 PCI_FUNC(e->devid), in init_iommu_from_acpi()
838 devid_start = e->devid; in init_iommu_from_acpi()
847 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
848 PCI_SLOT(e->devid), in init_iommu_from_acpi()
849 PCI_FUNC(e->devid), in init_iommu_from_acpi()
855 devid = e->devid; in init_iommu_from_acpi()
857 set_dev_entry_from_acpi(iommu, devid, e->flags, 0); in init_iommu_from_acpi()
859 amd_iommu_alias_table[devid] = devid_to; in init_iommu_from_acpi()
866 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
867 PCI_SLOT(e->devid), in init_iommu_from_acpi()
868 PCI_FUNC(e->devid), in init_iommu_from_acpi()
874 devid_start = e->devid; in init_iommu_from_acpi()
884 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
885 PCI_SLOT(e->devid), in init_iommu_from_acpi()
886 PCI_FUNC(e->devid), in init_iommu_from_acpi()
889 devid = e->devid; in init_iommu_from_acpi()
890 set_dev_entry_from_acpi(iommu, devid, e->flags, in init_iommu_from_acpi()
897 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
898 PCI_SLOT(e->devid), in init_iommu_from_acpi()
899 PCI_FUNC(e->devid), in init_iommu_from_acpi()
902 devid_start = e->devid; in init_iommu_from_acpi()
910 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
911 PCI_SLOT(e->devid), in init_iommu_from_acpi()
912 PCI_FUNC(e->devid)); in init_iommu_from_acpi()
914 devid = e->devid; in init_iommu_from_acpi()
915 for (dev_i = devid_start; dev_i <= devid; ++dev_i) { in init_iommu_from_acpi()
928 u16 devid; in init_iommu_from_acpi() local
932 devid = (e->ext >> 8) & 0xffff; in init_iommu_from_acpi()
944 PCI_BUS_NUM(devid), in init_iommu_from_acpi()
945 PCI_SLOT(devid), in init_iommu_from_acpi()
946 PCI_FUNC(devid)); in init_iommu_from_acpi()
948 ret = add_special_device(type, handle, &devid, false); in init_iommu_from_acpi()
957 set_dev_entry_from_acpi(iommu, devid, e->flags, 0); in init_iommu_from_acpi()
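Within init_iommu_from_acpi(), SELECT and ALIAS entries program a single devid, the *_RANGE_START entries only remember devid_start until a later RANGE_END entry closes the range with the dev_i loop, and special-device entries recover their devid from bits 8..23 of the ext field. A hedged sketch of that walk, with a simplified entry layout and made-up type names:

#include <stdint.h>
#include <stdio.h>

/* Illustrative IVHD entry: type, devid and an extended field whose
 * upper bits carry the devid for "special" entries. The layout is a
 * simplification, not a byte-accurate copy of the ACPI structure. */
struct ivhd_entry {
        uint8_t  type;
        uint16_t devid;
        uint32_t ext;
};

enum { IVHD_DEV_SELECT, IVHD_DEV_RANGE_START, IVHD_DEV_RANGE_END,
       IVHD_DEV_SPECIAL };

static void program_devid(uint16_t devid)
{
        printf("program devid %02x:%02x.%x\n",
               devid >> 8, (devid >> 3) & 0x1f, devid & 0x7);
}

static void walk_ivhd(const struct ivhd_entry *e, size_t n)
{
        uint16_t devid_start = 0;

        for (size_t i = 0; i < n; i++) {
                switch (e[i].type) {
                case IVHD_DEV_SELECT:
                        program_devid(e[i].devid);
                        break;
                case IVHD_DEV_RANGE_START:
                        devid_start = e[i].devid;       /* remember the start */
                        break;
                case IVHD_DEV_RANGE_END:
                        /* expand the whole range, like the dev_i loop */
                        for (uint16_t d = devid_start; d <= e[i].devid; d++)
                                program_devid(d);
                        break;
                case IVHD_DEV_SPECIAL:
                        /* devid travels in bits 8..23 of the ext field */
                        program_devid((e[i].ext >> 8) & 0xffff);
                        break;
                }
        }
}

int main(void)
{
        struct ivhd_entry entries[] = {
                { IVHD_DEV_SELECT,      0x0010, 0 },
                { IVHD_DEV_RANGE_START, 0x0020, 0 },
                { IVHD_DEV_RANGE_END,   0x0022, 0 },
                { IVHD_DEV_SPECIAL,     0,      0x00a000 },
        };

        walk_ivhd(entries, sizeof(entries) / sizeof(entries[0]));
        return 0;
}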
1076 iommu->devid = h->devid; in init_iommu_one()
1115 amd_iommu_rlookup_table[iommu->devid] = NULL; in init_iommu_one()
1141 PCI_BUS_NUM(h->devid), PCI_SLOT(h->devid), in init_iommu_all()
1142 PCI_FUNC(h->devid), h->cap_ptr, in init_iommu_all()
1231 iommu->dev = pci_get_bus_and_slot(PCI_BUS_NUM(iommu->devid), in iommu_init_pci()
1232 iommu->devid & 0xff); in iommu_init_pci()
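iommu_init_pci() resolves the IOMMU's own devid to a PCI device via pci_get_bus_and_slot(), splitting the 16-bit value into a bus number (high byte) and a devfn (low byte). A standalone decode of that split, with the usual PCI helper macros reimplemented locally for illustration:

#include <stdint.h>
#include <stdio.h>

/* Local reimplementations of the usual PCI helpers: a 16-bit devid is
 * the bus in the high byte and devfn (slot/function) in the low byte. */
#define BUS_NUM(devid)  (((devid) >> 8) & 0xff)
#define DEVFN(devid)    ((devid) & 0xff)
#define SLOT(devfn)     (((devfn) >> 3) & 0x1f)
#define FUNC(devfn)     ((devfn) & 0x07)

int main(void)
{
        uint16_t devid = 0x0002;        /* e.g. an IOMMU at 00:00.2 */

        printf("devid 0x%04x -> %02x:%02x.%x\n",
               devid, BUS_NUM(devid), SLOT(DEVFN(devid)), FUNC(DEVFN(devid)));
        return 0;
}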
1461 set_device_exclusion_range(m->devid, m); in init_exclusion_range()
1468 for (i = m->devid; i <= m->aux; ++i) in init_exclusion_range()
1494 e->devid_start = e->devid_end = m->devid; in init_unity_map_range()
1503 e->devid_start = m->devid; in init_unity_map_range()
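init_unity_map_range() turns an IVMD record into a unity-mapped devid range: a single-device record uses the same start and end, an all-devices record spans everything up to amd_iommu_last_bdf, and a range record starts at m->devid (with m->aux as the end, matching the exclusion-range loop above). A sketch of that selection with simplified structures and illustrative type values:

#include <stdint.h>
#include <stdio.h>

/* Simplified IVMD record and unity-map entry; field names follow the
 * listing, the type values are illustrative. */
enum { IVMD_TYPE_ONE, IVMD_TYPE_ALL, IVMD_TYPE_RANGE };

struct ivmd_header {
        uint8_t  type;
        uint16_t devid;
        uint16_t aux;           /* end of the range for IVMD_TYPE_RANGE */
};

struct unity_map_entry {
        uint16_t devid_start;
        uint16_t devid_end;
};

static void fill_unity_range(struct unity_map_entry *e,
                             const struct ivmd_header *m,
                             uint16_t last_bdf)
{
        switch (m->type) {
        case IVMD_TYPE_ONE:             /* one device */
                e->devid_start = e->devid_end = m->devid;
                break;
        case IVMD_TYPE_ALL:             /* every device */
                e->devid_start = 0;
                e->devid_end   = last_bdf;
                break;
        case IVMD_TYPE_RANGE:           /* explicit devid range */
                e->devid_start = m->devid;
                e->devid_end   = m->aux;
                break;
        }
}

int main(void)
{
        struct ivmd_header m = { IVMD_TYPE_RANGE, 0x0020, 0x0027 };
        struct unity_map_entry e;

        fill_unity_range(&e, &m, 0xffff);
        printf("unity map for devids 0x%04x..0x%04x\n",
               e.devid_start, e.devid_end);
        return 0;
}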
1551 u32 devid; in init_device_table_dma() local
1553 for (devid = 0; devid <= amd_iommu_last_bdf; ++devid) { in init_device_table_dma()
1554 set_dev_entry_bit(devid, DEV_ENTRY_VALID); in init_device_table_dma()
1555 set_dev_entry_bit(devid, DEV_ENTRY_TRANSLATION); in init_device_table_dma()
1561 u32 devid; in uninit_device_table_dma() local
1563 for (devid = 0; devid <= amd_iommu_last_bdf; ++devid) { in uninit_device_table_dma()
1564 amd_iommu_dev_table[devid].data[0] = 0ULL; in uninit_device_table_dma()
1565 amd_iommu_dev_table[devid].data[1] = 0ULL; in uninit_device_table_dma()
1571 u32 devid; in init_device_table() local
1576 for (devid = 0; devid <= amd_iommu_last_bdf; ++devid) in init_device_table()
1577 set_dev_entry_bit(devid, DEV_ENTRY_IRQ_TBL_EN); in init_device_table()
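init_device_table_dma() marks every devid up to amd_iommu_last_bdf as valid with translation enabled, uninit_device_table_dma() clears the first two words of each entry again, and init_device_table() additionally sets the IRQ-table-enable bit when interrupt remapping is used. A compact sketch of those loops over a toy device table (the bit numbers are placeholders):

#include <stdint.h>

enum { ENTRY_VALID = 0, ENTRY_TRANSLATION = 1, ENTRY_IRQ_TBL_EN = 96 };

struct dev_table_entry { uint64_t data[4]; };

static struct dev_table_entry dev_table[256];
static uint16_t last_bdf = 255;

static void set_bit_in_entry(uint16_t devid, uint8_t bit)
{
        dev_table[devid].data[(bit >> 6) & 0x03] |= 1ULL << (bit & 0x3f);
}

/* Default every possible devid to "valid, translation enabled". */
static void init_device_table_dma(void)
{
        for (uint32_t devid = 0; devid <= last_bdf; devid++) {
                set_bit_in_entry(devid, ENTRY_VALID);
                set_bit_in_entry(devid, ENTRY_TRANSLATION);
        }
}

/* With interrupt remapping, also route interrupts through a table. */
static void init_device_table_irq(void)
{
        for (uint32_t devid = 0; devid <= last_bdf; devid++)
                set_bit_in_entry(devid, ENTRY_IRQ_TBL_EN);
}

/* Undo it again, e.g. when initialization has to be rolled back. */
static void uninit_device_table_dma(void)
{
        for (uint32_t devid = 0; devid <= last_bdf; devid++) {
                dev_table[devid].data[0] = 0ULL;
                dev_table[devid].data[1] = 0ULL;
        }
}

int main(void)
{
        init_device_table_dma();
        init_device_table_irq();
        uninit_device_table_dma();
        return 0;
}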
1776 int devid, id = mpc_ioapic_id(idx); in check_ioapic_information() local
1778 devid = get_ioapic_devid(id); in check_ioapic_information()
1779 if (devid < 0) { in check_ioapic_information()
1783 } else if (devid == IOAPIC_SB_DEVID) { in check_ioapic_information()
2206 u16 devid; in parse_ivrs_ioapic() local
2221 devid = ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7); in parse_ivrs_ioapic()
2226 early_ioapic_map[i].devid = devid; in parse_ivrs_ioapic()
2236 u16 devid; in parse_ivrs_hpet() local
2251 devid = ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7); in parse_ivrs_hpet()
2256 early_hpet_map[i].devid = devid; in parse_ivrs_hpet()
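parse_ivrs_ioapic()/parse_ivrs_hpet() take an override from the kernel command line and pack bus/device/function into a 16-bit devid with ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7). A standalone sketch of that packing; the "bus:dev.fn" input string used here is an assumption for the example:

#include <stdint.h>
#include <stdio.h>

/* Pack PCI bus/device/function into the 16-bit devid used to index
 * the IOMMU tables: bus[15:8], device[7:3], function[2:0]. */
static uint16_t make_devid(unsigned bus, unsigned dev, unsigned fn)
{
        return ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7);
}

int main(void)
{
        unsigned bus, dev, fn;

        /* e.g. an override such as "00:14.0" for an IOAPIC or HPET */
        if (sscanf("00:14.0", "%x:%x.%x", &bus, &dev, &fn) != 3)
                return 1;

        printf("devid = 0x%04x\n", make_devid(bus, dev, fn));
        return 0;
}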
2285 u8 amd_iommu_pc_get_max_banks(u16 devid) in amd_iommu_pc_get_max_banks() argument
2291 iommu = amd_iommu_rlookup_table[devid]; in amd_iommu_pc_get_max_banks()
2305 u8 amd_iommu_pc_get_max_counters(u16 devid) in amd_iommu_pc_get_max_counters() argument
2311 iommu = amd_iommu_rlookup_table[devid]; in amd_iommu_pc_get_max_counters()
2352 int amd_iommu_pc_get_set_reg_val(u16 devid, u8 bank, u8 cntr, u8 fxn, in amd_iommu_pc_get_set_reg_val() argument
2355 struct amd_iommu *iommu = amd_iommu_rlookup_table[devid]; in amd_iommu_pc_get_set_reg_val()
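The performance-counter helpers amd_iommu_pc_get_max_banks(), amd_iommu_pc_get_max_counters() and amd_iommu_pc_get_set_reg_val() all begin by mapping the devid to its IOMMU through amd_iommu_rlookup_table. A minimal sketch of that reverse lookup plus the NULL check before touching the counters (struct fields and sizes are placeholders):

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Toy IOMMU descriptor; max_banks/max_counters stand in for whatever
 * the hardware reports for its performance counter block. */
struct amd_iommu {
        uint8_t max_banks;
        uint8_t max_counters;
};

/* Reverse lookup table: devid -> owning IOMMU (NULL if none). */
static struct amd_iommu *rlookup_table[0x10000];

static uint8_t pc_get_max_banks(uint16_t devid)
{
        struct amd_iommu *iommu = rlookup_table[devid];

        return iommu ? iommu->max_banks : 0;    /* 0 if no IOMMU owns it */
}

int main(void)
{
        static struct amd_iommu iommu = { .max_banks = 2, .max_counters = 4 };

        rlookup_table[0x00a0] = &iommu;         /* device behind this IOMMU */
        printf("banks for 0x00a0: %u\n", pc_get_max_banks(0x00a0));
        printf("banks for 0x0010: %u\n", pc_get_max_banks(0x0010));
        return 0;
}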