Lines matching refs:devid (cross-reference hits from drivers/iommu/amd_iommu_init.c; the leading number on each hit is the line number in that file)
97 u16 devid; member
111 u16 devid; member
124 u16 devid; member
234 static inline void update_last_devid(u16 devid) in update_last_devid() argument
236 if (devid > amd_iommu_last_bdf) in update_last_devid()
237 amd_iommu_last_bdf = devid; in update_last_devid()
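The two hits above show find_last_devid_from_ivhd() keeping amd_iommu_last_bdf at the highest device ID seen while scanning the IVRS table. That maximum later sizes the per-devid tables; the same file has a small helper for that, reconstructed from memory here as a hedged sketch (tbl_size() and its body are recalled, not quoted from the hits above):

    static inline unsigned long tbl_size(int entry_size)
    {
            /* round (last_bdf + 1) * entry_size up to a power-of-two number
             * of pages so the table can come from get_free_pages() */
            unsigned shift = PAGE_SHIFT +
                             get_order(((int)amd_iommu_last_bdf + 1) * entry_size);

            return 1UL << shift;
    }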
439 find_last_devid_on_pci(PCI_BUS_NUM(h->devid), in find_last_devid_from_ivhd()
440 PCI_SLOT(h->devid), in find_last_devid_from_ivhd()
441 PCI_FUNC(h->devid), in find_last_devid_from_ivhd()
452 update_last_devid(dev->devid); in find_last_devid_from_ivhd()
658 static void set_dev_entry_bit(u16 devid, u8 bit) in set_dev_entry_bit() argument
663 amd_iommu_dev_table[devid].data[i] |= (1UL << _bit); in set_dev_entry_bit()
666 static int get_dev_entry_bit(u16 devid, u8 bit) in get_dev_entry_bit() argument
671 return (amd_iommu_dev_table[devid].data[i] & (1UL << _bit)) >> _bit; in get_dev_entry_bit()
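The i and _bit locals used on lines 663 and 671 are computed on lines the search does not match. Each device table entry is 256 bits wide (four u64 words, consistent with the data[0]/data[1] writes further down), so the helpers split the flat bit number into a word index and a bit offset, roughly like this (a hedged reconstruction, not the matched text):

    static void set_dev_entry_bit(u16 devid, u8 bit)
    {
            int i    = (bit >> 6) & 0x03;   /* which u64 word of the entry   */
            int _bit = bit & 0x3f;          /* bit position inside that word */

            amd_iommu_dev_table[devid].data[i] |= (1UL << _bit);
    }

    static int get_dev_entry_bit(u16 devid, u8 bit)
    {
            int i    = (bit >> 6) & 0x03;
            int _bit = bit & 0x3f;

            return (amd_iommu_dev_table[devid].data[i] & (1UL << _bit)) >> _bit;
    }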
675 void amd_iommu_apply_erratum_63(u16 devid) in amd_iommu_apply_erratum_63() argument
679 sysmgt = get_dev_entry_bit(devid, DEV_ENTRY_SYSMGT1) | in amd_iommu_apply_erratum_63()
680 (get_dev_entry_bit(devid, DEV_ENTRY_SYSMGT2) << 1); in amd_iommu_apply_erratum_63()
683 set_dev_entry_bit(devid, DEV_ENTRY_IW); in amd_iommu_apply_erratum_63()
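Erratum 63 only applies when the two SYSMGT bits decode to 0b01; the conditional sits on a line that does not mention devid, so it is filtered out of the listing. The whole function is small enough to reconstruct (hedged, from the surrounding context rather than the matched lines):

    void amd_iommu_apply_erratum_63(u16 devid)
    {
            int sysmgt;

            /* SYSMGT1/SYSMGT2 together form a two-bit field in the DTE */
            sysmgt = get_dev_entry_bit(devid, DEV_ENTRY_SYSMGT1) |
                     (get_dev_entry_bit(devid, DEV_ENTRY_SYSMGT2) << 1);

            /* erratum 63: SYSMGT == 01b requires the IW (write) bit set */
            if (sysmgt == 0x01)
                    set_dev_entry_bit(devid, DEV_ENTRY_IW);
    }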
687 static void __init set_iommu_for_device(struct amd_iommu *iommu, u16 devid) in set_iommu_for_device() argument
689 amd_iommu_rlookup_table[devid] = iommu; in set_iommu_for_device()
697 u16 devid, u32 flags, u32 ext_flags) in set_dev_entry_from_acpi() argument
700 set_dev_entry_bit(devid, DEV_ENTRY_INIT_PASS); in set_dev_entry_from_acpi()
702 set_dev_entry_bit(devid, DEV_ENTRY_EINT_PASS); in set_dev_entry_from_acpi()
704 set_dev_entry_bit(devid, DEV_ENTRY_NMI_PASS); in set_dev_entry_from_acpi()
706 set_dev_entry_bit(devid, DEV_ENTRY_SYSMGT1); in set_dev_entry_from_acpi()
708 set_dev_entry_bit(devid, DEV_ENTRY_SYSMGT2); in set_dev_entry_from_acpi()
710 set_dev_entry_bit(devid, DEV_ENTRY_LINT0_PASS); in set_dev_entry_from_acpi()
712 set_dev_entry_bit(devid, DEV_ENTRY_LINT1_PASS); in set_dev_entry_from_acpi()
714 amd_iommu_apply_erratum_63(devid); in set_dev_entry_from_acpi()
716 set_iommu_for_device(iommu, devid); in set_dev_entry_from_acpi()
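Only the set_dev_entry_bit() calls match the search; the flag tests that gate them (one per skipped line between hits 700 and 712) do not mention devid. A hedged reconstruction of the full function, using the ACPI_DEVFLAG_* names from the same driver, to show how the IVHD flags map onto DTE bits:

    static void __init set_dev_entry_from_acpi(struct amd_iommu *iommu,
                                               u16 devid, u32 flags, u32 ext_flags)
    {
            if (flags & ACPI_DEVFLAG_INITPASS)
                    set_dev_entry_bit(devid, DEV_ENTRY_INIT_PASS);
            if (flags & ACPI_DEVFLAG_EXTINT)
                    set_dev_entry_bit(devid, DEV_ENTRY_EINT_PASS);
            if (flags & ACPI_DEVFLAG_NMI)
                    set_dev_entry_bit(devid, DEV_ENTRY_NMI_PASS);
            if (flags & ACPI_DEVFLAG_SYSMGT1)
                    set_dev_entry_bit(devid, DEV_ENTRY_SYSMGT1);
            if (flags & ACPI_DEVFLAG_SYSMGT2)
                    set_dev_entry_bit(devid, DEV_ENTRY_SYSMGT2);
            if (flags & ACPI_DEVFLAG_LINT0)
                    set_dev_entry_bit(devid, DEV_ENTRY_LINT0_PASS);
            if (flags & ACPI_DEVFLAG_LINT1)
                    set_dev_entry_bit(devid, DEV_ENTRY_LINT1_PASS);

            amd_iommu_apply_erratum_63(devid);

            set_iommu_for_device(iommu, devid);
    }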
719 static int __init add_special_device(u8 type, u8 id, u16 *devid, bool cmd_line) in add_special_device() argument
738 *devid = entry->devid; in add_special_device()
748 entry->devid = *devid; in add_special_device()
763 &early_ioapic_map[i].devid, in add_early_maps()
772 &early_hpet_map[i].devid, in add_early_maps()
785 static void __init set_device_exclusion_range(u16 devid, struct ivmd_header *m) in set_device_exclusion_range() argument
787 struct amd_iommu *iommu = amd_iommu_rlookup_table[devid]; in set_device_exclusion_range()
798 set_dev_entry_bit(devid, DEV_ENTRY_EX); in set_device_exclusion_range()
813 u16 devid = 0, devid_start = 0, devid_to = 0; in init_iommu_from_acpi() local
860 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
861 PCI_SLOT(e->devid), in init_iommu_from_acpi()
862 PCI_FUNC(e->devid), in init_iommu_from_acpi()
865 devid = e->devid; in init_iommu_from_acpi()
866 set_dev_entry_from_acpi(iommu, devid, e->flags, 0); in init_iommu_from_acpi()
872 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
873 PCI_SLOT(e->devid), in init_iommu_from_acpi()
874 PCI_FUNC(e->devid), in init_iommu_from_acpi()
877 devid_start = e->devid; in init_iommu_from_acpi()
886 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
887 PCI_SLOT(e->devid), in init_iommu_from_acpi()
888 PCI_FUNC(e->devid), in init_iommu_from_acpi()
894 devid = e->devid; in init_iommu_from_acpi()
896 set_dev_entry_from_acpi(iommu, devid, e->flags, 0); in init_iommu_from_acpi()
898 amd_iommu_alias_table[devid] = devid_to; in init_iommu_from_acpi()
905 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
906 PCI_SLOT(e->devid), in init_iommu_from_acpi()
907 PCI_FUNC(e->devid), in init_iommu_from_acpi()
913 devid_start = e->devid; in init_iommu_from_acpi()
923 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
924 PCI_SLOT(e->devid), in init_iommu_from_acpi()
925 PCI_FUNC(e->devid), in init_iommu_from_acpi()
928 devid = e->devid; in init_iommu_from_acpi()
929 set_dev_entry_from_acpi(iommu, devid, e->flags, in init_iommu_from_acpi()
936 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
937 PCI_SLOT(e->devid), in init_iommu_from_acpi()
938 PCI_FUNC(e->devid), in init_iommu_from_acpi()
941 devid_start = e->devid; in init_iommu_from_acpi()
949 PCI_BUS_NUM(e->devid), in init_iommu_from_acpi()
950 PCI_SLOT(e->devid), in init_iommu_from_acpi()
951 PCI_FUNC(e->devid)); in init_iommu_from_acpi()
953 devid = e->devid; in init_iommu_from_acpi()
954 for (dev_i = devid_start; dev_i <= devid; ++dev_i) { in init_iommu_from_acpi()
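Lines 877, 913 and 941 record devid_start when a range or alias-range entry opens, and line 954 walks the range once the matching RANGE_END arrives; the loop body operates on dev_i rather than devid, so it does not appear above. A hedged sketch of that handler (alias, devid_to, flags and ext_flags are locals accumulated by the earlier cases):

    case IVHD_DEV_RANGE_END:
            devid = e->devid;
            for (dev_i = devid_start; dev_i <= devid; ++dev_i) {
                    if (alias) {
                            /* every device in the range shares one alias target */
                            amd_iommu_alias_table[dev_i] = devid_to;
                            set_dev_entry_from_acpi(iommu, devid_to, flags, ext_flags);
                    }
                    set_dev_entry_from_acpi(iommu, dev_i, flags, ext_flags);
            }
            break;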
967 u16 devid; in init_iommu_from_acpi() local
971 devid = (e->ext >> 8) & 0xffff; in init_iommu_from_acpi()
983 PCI_BUS_NUM(devid), in init_iommu_from_acpi()
984 PCI_SLOT(devid), in init_iommu_from_acpi()
985 PCI_FUNC(devid)); in init_iommu_from_acpi()
987 ret = add_special_device(type, handle, &devid, false); in init_iommu_from_acpi()
996 set_dev_entry_from_acpi(iommu, devid, e->flags, 0); in init_iommu_from_acpi()
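The devid extracted from e->ext on line 971 belongs to the IVHD "special device" entry type used for IOAPICs and HPETs: the handle, the routing devid and the sub-type are all packed into the 32-bit ext field. A hedged reconstruction of that case (field layout as recalled from this era of the driver):

    case IVHD_DEV_SPECIAL: {
            u8 handle, type;
            u16 devid;

            handle = e->ext & 0xff;                 /* IOAPIC id / HPET number      */
            devid  = (e->ext >> 8) & 0xffff;        /* bus/dev/fn routing the IRQs  */
            type   = (e->ext >> 24) & 0xff;         /* IVHD_SPECIAL_IOAPIC or _HPET */

            /* a command-line override may rewrite devid in here */
            ret = add_special_device(type, handle, &devid, false);
            if (ret)
                    return ret;

            /* set the DTE bits only after any override has been applied */
            set_dev_entry_from_acpi(iommu, devid, e->flags, 0);
            break;
    }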
1126 iommu->devid = h->devid; in init_iommu_one()
1163 amd_iommu_rlookup_table[iommu->devid] = NULL; in init_iommu_one()
1191 PCI_BUS_NUM(h->devid), PCI_SLOT(h->devid), in init_iommu_all()
1192 PCI_FUNC(h->devid), h->cap_ptr, in init_iommu_all()
1281 iommu->dev = pci_get_bus_and_slot(PCI_BUS_NUM(iommu->devid), in iommu_init_pci()
1282 iommu->devid & 0xff); in iommu_init_pci()
1510 set_device_exclusion_range(m->devid, m); in init_exclusion_range()
1517 for (i = m->devid; i <= m->aux; ++i) in init_exclusion_range()
1543 e->devid_start = e->devid_end = m->devid; in init_unity_map_range()
1552 e->devid_start = m->devid; in init_unity_map_range()
1600 u32 devid; in init_device_table_dma() local
1602 for (devid = 0; devid <= amd_iommu_last_bdf; ++devid) { in init_device_table_dma()
1603 set_dev_entry_bit(devid, DEV_ENTRY_VALID); in init_device_table_dma()
1604 set_dev_entry_bit(devid, DEV_ENTRY_TRANSLATION); in init_device_table_dma()
1610 u32 devid; in uninit_device_table_dma() local
1612 for (devid = 0; devid <= amd_iommu_last_bdf; ++devid) { in uninit_device_table_dma()
1613 amd_iommu_dev_table[devid].data[0] = 0ULL; in uninit_device_table_dma()
1614 amd_iommu_dev_table[devid].data[1] = 0ULL; in uninit_device_table_dma()
1620 u32 devid; in init_device_table() local
1625 for (devid = 0; devid <= amd_iommu_last_bdf; ++devid) in init_device_table()
1626 set_dev_entry_bit(devid, DEV_ENTRY_IRQ_TBL_EN); in init_device_table()
1828 int devid, id = mpc_ioapic_id(idx); in check_ioapic_information() local
1830 devid = get_ioapic_devid(id); in check_ioapic_information()
1831 if (devid < 0) { in check_ioapic_information()
1835 } else if (devid == IOAPIC_SB_DEVID) { in check_ioapic_information()
2285 u16 devid; in parse_ivrs_ioapic() local
2300 devid = ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7); in parse_ivrs_ioapic()
2305 early_ioapic_map[i].devid = devid; in parse_ivrs_ioapic()
2315 u16 devid; in parse_ivrs_hpet() local
2330 devid = ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7); in parse_ivrs_hpet()
2335 early_hpet_map[i].devid = devid; in parse_ivrs_hpet()
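The shift-and-mask expressions on lines 2300 and 2330 show how the 16-bit devid used throughout this file encodes a PCI bus/device/function: bus in bits 15:8, slot in bits 7:3, function in bits 2:0, with the low byte being the devfn (which is why PCI_SLOT()/PCI_FUNC() can be applied directly to h->devid elsewhere in the listing). A small self-contained illustration, with the kernel macros redefined locally so it builds in userspace:

    #include <stdint.h>
    #include <stdio.h>

    /* local stand-ins mirroring the kernel's PCI helpers */
    #define PCI_BUS_NUM(x)   (((x) >> 8) & 0xff)
    #define PCI_SLOT(devfn)  (((devfn) >> 3) & 0x1f)
    #define PCI_FUNC(devfn)  ((devfn) & 0x07)

    int main(void)
    {
            uint16_t bus = 0x00, dev = 0x14, fn = 0x0;
            /* same packing as parse_ivrs_ioapic()/parse_ivrs_hpet() */
            uint16_t devid = ((bus & 0xff) << 8) | ((dev & 0x1f) << 3) | (fn & 0x7);

            /* prints "00:14.0 -> devid 0x00a0" */
            printf("%02x:%02x.%x -> devid 0x%04x\n",
                   PCI_BUS_NUM(devid), PCI_SLOT(devid), PCI_FUNC(devid), devid);
            return 0;
    }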
2364 u8 amd_iommu_pc_get_max_banks(u16 devid) in amd_iommu_pc_get_max_banks() argument
2370 iommu = amd_iommu_rlookup_table[devid]; in amd_iommu_pc_get_max_banks()
2384 u8 amd_iommu_pc_get_max_counters(u16 devid) in amd_iommu_pc_get_max_counters() argument
2390 iommu = amd_iommu_rlookup_table[devid]; in amd_iommu_pc_get_max_counters()
2431 int amd_iommu_pc_get_set_reg_val(u16 devid, u8 bank, u8 cntr, u8 fxn, in amd_iommu_pc_get_set_reg_val() argument
2434 struct amd_iommu *iommu = amd_iommu_rlookup_table[devid]; in amd_iommu_pc_get_set_reg_val()
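The rlookup table indexed on lines 2370, 2390 and 2434 is the inverse of set_iommu_for_device() above: it maps a devid back to the IOMMU that owns it, and the perf-counter helpers simply return 0 if no IOMMU claims the device. A hedged sketch of the first helper (the max_banks field name is assumed from the same era of the driver):

    u8 amd_iommu_pc_get_max_banks(u16 devid)
    {
            struct amd_iommu *iommu;
            u8 ret = 0;

            /* locate the IOMMU governing this devid; may be NULL */
            iommu = amd_iommu_rlookup_table[devid];
            if (iommu)
                    ret = iommu->max_banks;

            return ret;
    }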