icm 657 drivers/infiniband/hw/mthca/mthca_cmd.c static int mthca_map_cmd(struct mthca_dev *dev, u16 op, struct mthca_icm *icm,
icm 675 drivers/infiniband/hw/mthca/mthca_cmd.c for (mthca_icm_first(icm, &iter);
icm 736 drivers/infiniband/hw/mthca/mthca_cmd.c int mthca_MAP_FA(struct mthca_dev *dev, struct mthca_icm *icm)
icm 738 drivers/infiniband/hw/mthca/mthca_cmd.c return mthca_map_cmd(dev, CMD_MAP_FA, icm, -1);
icm 1534 drivers/infiniband/hw/mthca/mthca_cmd.c int mthca_MAP_ICM(struct mthca_dev *dev, struct mthca_icm *icm, u64 virt)
icm 1536 drivers/infiniband/hw/mthca/mthca_cmd.c return mthca_map_cmd(dev, CMD_MAP_ICM, icm, virt);
icm 1574 drivers/infiniband/hw/mthca/mthca_cmd.c int mthca_MAP_ICM_AUX(struct mthca_dev *dev, struct mthca_icm *icm)
icm 1576 drivers/infiniband/hw/mthca/mthca_cmd.c return mthca_map_cmd(dev, CMD_MAP_ICM_AUX, icm, -1);
icm 257 drivers/infiniband/hw/mthca/mthca_cmd.h int mthca_MAP_FA(struct mthca_dev *dev, struct mthca_icm *icm);
icm 277 drivers/infiniband/hw/mthca/mthca_cmd.h int mthca_MAP_ICM(struct mthca_dev *dev, struct mthca_icm *icm, u64 virt);
icm 280 drivers/infiniband/hw/mthca/mthca_cmd.h int mthca_MAP_ICM_AUX(struct mthca_dev *dev, struct mthca_icm *icm);
icm 88 drivers/infiniband/hw/mthca/mthca_memfree.c void mthca_free_icm(struct mthca_dev *dev, struct mthca_icm *icm, int coherent)
icm 92 drivers/infiniband/hw/mthca/mthca_memfree.c if (!icm)
icm 95 drivers/infiniband/hw/mthca/mthca_memfree.c list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {
icm 104 drivers/infiniband/hw/mthca/mthca_memfree.c kfree(icm);
icm 140 drivers/infiniband/hw/mthca/mthca_memfree.c struct mthca_icm *icm;
icm 148 drivers/infiniband/hw/mthca/mthca_memfree.c icm = kmalloc(sizeof *icm, gfp_mask & ~(__GFP_HIGHMEM | __GFP_NOWARN));
icm 149 drivers/infiniband/hw/mthca/mthca_memfree.c if (!icm)
icm 150 drivers/infiniband/hw/mthca/mthca_memfree.c return icm;
icm 152 drivers/infiniband/hw/mthca/mthca_memfree.c icm->refcount = 0;
icm 153 drivers/infiniband/hw/mthca/mthca_memfree.c INIT_LIST_HEAD(&icm->chunk_list);
icm 167 drivers/infiniband/hw/mthca/mthca_memfree.c list_add_tail(&chunk->list, &icm->chunk_list);
icm 215 drivers/infiniband/hw/mthca/mthca_memfree.c return icm;
icm 218 drivers/infiniband/hw/mthca/mthca_memfree.c mthca_free_icm(dev, icm, coherent);
icm 229 drivers/infiniband/hw/mthca/mthca_memfree.c if (table->icm[i]) {
icm 230 drivers/infiniband/hw/mthca/mthca_memfree.c ++table->icm[i]->refcount;
icm 234 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = mthca_alloc_icm(dev, MTHCA_TABLE_CHUNK_SIZE >> PAGE_SHIFT,
icm 237 drivers/infiniband/hw/mthca/mthca_memfree.c if (!table->icm[i]) {
icm 242 drivers/infiniband/hw/mthca/mthca_memfree.c if (mthca_MAP_ICM(dev, table->icm[i],
icm 244 drivers/infiniband/hw/mthca/mthca_memfree.c mthca_free_icm(dev, table->icm[i], table->coherent);
icm 245 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = NULL;
icm 250 drivers/infiniband/hw/mthca/mthca_memfree.c ++table->icm[i]->refcount;
icm 268 drivers/infiniband/hw/mthca/mthca_memfree.c if (--table->icm[i]->refcount == 0) {
icm 271 drivers/infiniband/hw/mthca/mthca_memfree.c mthca_free_icm(dev, table->icm[i], table->coherent);
icm 272 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = NULL;
icm 282 drivers/infiniband/hw/mthca/mthca_memfree.c struct mthca_icm *icm;
icm 291 drivers/infiniband/hw/mthca/mthca_memfree.c icm = table->icm[idx / MTHCA_TABLE_CHUNK_SIZE];
icm 294 drivers/infiniband/hw/mthca/mthca_memfree.c if (!icm)
icm 297 drivers/infiniband/hw/mthca/mthca_memfree.c list_for_each_entry(chunk, &icm->chunk_list, list) {
icm 370 drivers/infiniband/hw/mthca/mthca_memfree.c table = kmalloc(struct_size(table, icm, num_icm), GFP_KERNEL);
icm 383 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = NULL;
icm 390 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = mthca_alloc_icm(dev, chunk_size >> PAGE_SHIFT,
icm 393 drivers/infiniband/hw/mthca/mthca_memfree.c if (!table->icm[i])
icm 395 drivers/infiniband/hw/mthca/mthca_memfree.c if (mthca_MAP_ICM(dev, table->icm[i],
icm 397 drivers/infiniband/hw/mthca/mthca_memfree.c mthca_free_icm(dev, table->icm[i], table->coherent);
icm 398 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = NULL;
icm 406 drivers/infiniband/hw/mthca/mthca_memfree.c ++table->icm[i]->refcount;
icm 413 drivers/infiniband/hw/mthca/mthca_memfree.c if (table->icm[i]) {
icm 416 drivers/infiniband/hw/mthca/mthca_memfree.c mthca_free_icm(dev, table->icm[i], table->coherent);
icm 429 drivers/infiniband/hw/mthca/mthca_memfree.c if (table->icm[i]) {
icm 433 drivers/infiniband/hw/mthca/mthca_memfree.c mthca_free_icm(dev, table->icm[i], table->coherent);
icm 71 drivers/infiniband/hw/mthca/mthca_memfree.h struct mthca_icm *icm[0];
icm 75 drivers/infiniband/hw/mthca/mthca_memfree.h struct mthca_icm *icm;
icm 84 drivers/infiniband/hw/mthca/mthca_memfree.h void mthca_free_icm(struct mthca_dev *dev, struct mthca_icm *icm, int coherent);
icm 99 drivers/infiniband/hw/mthca/mthca_memfree.h static inline void mthca_icm_first(struct mthca_icm *icm,
icm 102 drivers/infiniband/hw/mthca/mthca_memfree.h iter->icm = icm;
icm 103 drivers/infiniband/hw/mthca/mthca_memfree.h iter->chunk = list_empty(&icm->chunk_list) ?
icm 104 drivers/infiniband/hw/mthca/mthca_memfree.h NULL : list_entry(icm->chunk_list.next,
icm 117 drivers/infiniband/hw/mthca/mthca_memfree.h if (iter->chunk->list.next == &iter->icm->chunk_list) {
icm 1516 drivers/net/ethernet/mellanox/mlx4/fw.c int mlx4_map_cmd(struct mlx4_dev *dev, u16 op, struct mlx4_icm *icm, u64 virt)
icm 1532 drivers/net/ethernet/mellanox/mlx4/fw.c for (mlx4_icm_first(icm, &iter);
icm 1597 drivers/net/ethernet/mellanox/mlx4/fw.c int mlx4_MAP_FA(struct mlx4_dev *dev, struct mlx4_icm *icm)
icm 1599 drivers/net/ethernet/mellanox/mlx4/fw.c return mlx4_map_cmd(dev, MLX4_CMD_MAP_FA, icm, -1);
icm 240 drivers/net/ethernet/mellanox/mlx4/fw.h int mlx4_MAP_FA(struct mlx4_dev *dev, struct mlx4_icm *icm);
icm 248 drivers/net/ethernet/mellanox/mlx4/fw.h int mlx4_map_cmd(struct mlx4_dev *dev, u16 op, struct mlx4_icm *icm, u64 virt);
icm 250 drivers/net/ethernet/mellanox/mlx4/fw.h int mlx4_MAP_ICM_AUX(struct mlx4_dev *dev, struct mlx4_icm *icm);
icm 79 drivers/net/ethernet/mellanox/mlx4/icm.c void mlx4_free_icm(struct mlx4_dev *dev, struct mlx4_icm *icm, int coherent)
icm 83 drivers/net/ethernet/mellanox/mlx4/icm.c if (!icm)
icm 86 drivers/net/ethernet/mellanox/mlx4/icm.c list_for_each_entry_safe(chunk, tmp, &icm->chunk_list, list) {
icm 95 drivers/net/ethernet/mellanox/mlx4/icm.c kfree(icm);
icm 135 drivers/net/ethernet/mellanox/mlx4/icm.c struct mlx4_icm *icm;
icm 144 drivers/net/ethernet/mellanox/mlx4/icm.c icm = kmalloc_node(sizeof(*icm),
icm 147 drivers/net/ethernet/mellanox/mlx4/icm.c if (!icm) {
icm 148 drivers/net/ethernet/mellanox/mlx4/icm.c icm = kmalloc(sizeof(*icm),
icm 150 drivers/net/ethernet/mellanox/mlx4/icm.c if (!icm)
icm 154 drivers/net/ethernet/mellanox/mlx4/icm.c icm->refcount = 0;
icm 155 drivers/net/ethernet/mellanox/mlx4/icm.c INIT_LIST_HEAD(&icm->chunk_list);
icm 176 drivers/net/ethernet/mellanox/mlx4/icm.c list_add_tail(&chunk->list, &icm->chunk_list);
icm 229 drivers/net/ethernet/mellanox/mlx4/icm.c return icm;
icm 232 drivers/net/ethernet/mellanox/mlx4/icm.c mlx4_free_icm(dev, icm, coherent);
icm 236 drivers/net/ethernet/mellanox/mlx4/icm.c static int mlx4_MAP_ICM(struct mlx4_dev *dev, struct mlx4_icm *icm, u64 virt)
icm 238 drivers/net/ethernet/mellanox/mlx4/icm.c return mlx4_map_cmd(dev, MLX4_CMD_MAP_ICM, icm, virt);
icm 247 drivers/net/ethernet/mellanox/mlx4/icm.c int mlx4_MAP_ICM_AUX(struct mlx4_dev *dev, struct mlx4_icm *icm)
icm 249 drivers/net/ethernet/mellanox/mlx4/icm.c return mlx4_map_cmd(dev, MLX4_CMD_MAP_ICM_AUX, icm, -1);
icm 266 drivers/net/ethernet/mellanox/mlx4/icm.c if (table->icm[i]) {
icm 267 drivers/net/ethernet/mellanox/mlx4/icm.c ++table->icm[i]->refcount;
icm 271 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm[i] = mlx4_alloc_icm(dev, MLX4_TABLE_CHUNK_SIZE >> PAGE_SHIFT,
icm 274 drivers/net/ethernet/mellanox/mlx4/icm.c if (!table->icm[i]) {
icm 279 drivers/net/ethernet/mellanox/mlx4/icm.c if (mlx4_MAP_ICM(dev, table->icm[i], table->virt +
icm 281 drivers/net/ethernet/mellanox/mlx4/icm.c mlx4_free_icm(dev, table->icm[i], table->coherent);
icm 282 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm[i] = NULL;
icm 287 drivers/net/ethernet/mellanox/mlx4/icm.c ++table->icm[i]->refcount;
icm 303 drivers/net/ethernet/mellanox/mlx4/icm.c if (--table->icm[i]->refcount == 0) {
icm 307 drivers/net/ethernet/mellanox/mlx4/icm.c mlx4_free_icm(dev, table->icm[i], table->coherent);
icm 308 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm[i] = NULL;
icm 320 drivers/net/ethernet/mellanox/mlx4/icm.c struct mlx4_icm *icm;
icm 329 drivers/net/ethernet/mellanox/mlx4/icm.c icm = table->icm[idx / MLX4_TABLE_CHUNK_SIZE];
icm 332 drivers/net/ethernet/mellanox/mlx4/icm.c if (!icm)
icm 335 drivers/net/ethernet/mellanox/mlx4/icm.c list_for_each_entry(chunk, &icm->chunk_list, list) {
icm 430 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm = kvcalloc(num_icm, sizeof(*table->icm), GFP_KERNEL);
icm 431 drivers/net/ethernet/mellanox/mlx4/icm.c if (!table->icm)
icm 448 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm[i] = mlx4_alloc_icm(dev, chunk_size >> PAGE_SHIFT,
icm 451 drivers/net/ethernet/mellanox/mlx4/icm.c if (!table->icm[i])
icm 453 drivers/net/ethernet/mellanox/mlx4/icm.c if (mlx4_MAP_ICM(dev, table->icm[i], virt + i * MLX4_TABLE_CHUNK_SIZE)) {
icm 454 drivers/net/ethernet/mellanox/mlx4/icm.c mlx4_free_icm(dev, table->icm[i], use_coherent);
icm 455 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm[i] = NULL;
icm 463 drivers/net/ethernet/mellanox/mlx4/icm.c ++table->icm[i]->refcount;
icm 470 drivers/net/ethernet/mellanox/mlx4/icm.c if (table->icm[i]) {
icm 473 drivers/net/ethernet/mellanox/mlx4/icm.c mlx4_free_icm(dev, table->icm[i], use_coherent);
icm 476 drivers/net/ethernet/mellanox/mlx4/icm.c kvfree(table->icm);
icm 486 drivers/net/ethernet/mellanox/mlx4/icm.c if (table->icm[i]) {
icm 489 drivers/net/ethernet/mellanox/mlx4/icm.c mlx4_free_icm(dev, table->icm[i], table->coherent);
icm 492 drivers/net/ethernet/mellanox/mlx4/icm.c kvfree(table->icm);
icm 73 drivers/net/ethernet/mellanox/mlx4/icm.h struct mlx4_icm *icm;
icm 82 drivers/net/ethernet/mellanox/mlx4/icm.h void mlx4_free_icm(struct mlx4_dev *dev, struct mlx4_icm *icm, int coherent);
icm 96 drivers/net/ethernet/mellanox/mlx4/icm.h static inline void mlx4_icm_first(struct mlx4_icm *icm,
icm 99 drivers/net/ethernet/mellanox/mlx4/icm.h iter->icm = icm;
icm 100 drivers/net/ethernet/mellanox/mlx4/icm.h iter->chunk = list_empty(&icm->chunk_list) ?
icm 101 drivers/net/ethernet/mellanox/mlx4/icm.h NULL : list_entry(icm->chunk_list.next,
icm 114 drivers/net/ethernet/mellanox/mlx4/icm.h if (iter->chunk->list.next == &iter->icm->chunk_list) {
icm 141 drivers/net/ethernet/mellanox/mlx4/icm.h int mlx4_MAP_ICM_AUX(struct mlx4_dev *dev, struct mlx4_icm *icm);
icm 268 drivers/net/ethernet/mellanox/mlx4/mlx4.h struct mlx4_icm **icm;
icm 150 drivers/thunderbolt/icm.c static inline struct tb *icm_to_tb(struct icm *icm)
icm 152 drivers/thunderbolt/icm.c return ((void *)icm - sizeof(struct tb));
icm 179 drivers/thunderbolt/icm.c static int pci2cio_wait_completion(struct icm *icm, unsigned long timeout_msec)
icm 185 drivers/thunderbolt/icm.c pci_read_config_dword(icm->upstream_port,
icm 186 drivers/thunderbolt/icm.c icm->vnd_cap + PCIE2CIO_CMD, &cmd);
icm 199 drivers/thunderbolt/icm.c static int pcie2cio_read(struct icm *icm, enum tb_cfg_space cs,
icm 202 drivers/thunderbolt/icm.c struct pci_dev *pdev = icm->upstream_port;
icm 203 drivers/thunderbolt/icm.c int ret, vnd_cap = icm->vnd_cap;
icm 212 drivers/thunderbolt/icm.c ret = pci2cio_wait_completion(icm, 5000);
icm 220 drivers/thunderbolt/icm.c static int pcie2cio_write(struct icm *icm, enum tb_cfg_space cs,
icm 223 drivers/thunderbolt/icm.c struct pci_dev *pdev = icm->upstream_port;
icm 224 drivers/thunderbolt/icm.c int vnd_cap = icm->vnd_cap;
icm 235 drivers/thunderbolt/icm.c return pci2cio_wait_completion(icm, 5000);
icm 269 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 290 drivers/thunderbolt/icm.c mutex_lock(&icm->request_lock);
icm 292 drivers/thunderbolt/icm.c mutex_unlock(&icm->request_lock);
icm 312 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 314 drivers/thunderbolt/icm.c if (delayed_work_pending(&icm->rescan_work))
icm 315 drivers/thunderbolt/icm.c mod_delayed_work(tb->wq, &icm->rescan_work,
icm 321 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 323 drivers/thunderbolt/icm.c if (!icm->veto) {
icm 324 drivers/thunderbolt/icm.c icm->veto = true;
icm 332 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 334 drivers/thunderbolt/icm.c if (icm->veto) {
icm 335 drivers/thunderbolt/icm.c icm->veto = false;
icm 700 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 747 drivers/thunderbolt/icm.c ret = icm->get_route(tb, link, depth, &route);
icm 806 drivers/thunderbolt/icm.c ret = icm->get_route(tb, link, depth, &route);
icm 1347 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1367 drivers/thunderbolt/icm.c icm->upstream_port = upstream_port;
icm 1368 drivers/thunderbolt/icm.c icm->vnd_cap = cap;
icm 1592 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1604 drivers/thunderbolt/icm.c icm->device_connected(tb, n->pkg);
icm 1607 drivers/thunderbolt/icm.c icm->device_disconnected(tb, n->pkg);
icm 1610 drivers/thunderbolt/icm.c icm->xdomain_connected(tb, n->pkg);
icm 1613 drivers/thunderbolt/icm.c icm->xdomain_disconnected(tb, n->pkg);
icm 1616 drivers/thunderbolt/icm.c icm->rtd3_veto(tb, n->pkg);
icm 1647 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1651 drivers/thunderbolt/icm.c ret = icm->driver_ready(tb, security_level, nboot_acl, rpm);
icm 1679 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1682 drivers/thunderbolt/icm.c if (!icm->upstream_port)
icm 1697 drivers/thunderbolt/icm.c return icm->cio_reset(tb);
icm 1732 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1738 drivers/thunderbolt/icm.c if (!icm->upstream_port)
icm 1753 drivers/thunderbolt/icm.c ret = pcie2cio_read(icm, TB_CFG_PORT, port0, PHY_PORT_CS1, &val0);
icm 1756 drivers/thunderbolt/icm.c ret = pcie2cio_read(icm, TB_CFG_PORT, port1, PHY_PORT_CS1, &val1);
icm 1770 drivers/thunderbolt/icm.c ret = pcie2cio_write(icm, TB_CFG_PORT, port0, PHY_PORT_CS1, val0);
icm 1775 drivers/thunderbolt/icm.c ret = pcie2cio_write(icm, TB_CFG_PORT, port1, PHY_PORT_CS1, val1);
icm 1782 drivers/thunderbolt/icm.c ret = pcie2cio_read(icm, TB_CFG_PORT, port0, PHY_PORT_CS1, &val0);
icm 1785 drivers/thunderbolt/icm.c ret = pcie2cio_read(icm, TB_CFG_PORT, port1, PHY_PORT_CS1, &val1);
icm 1790 drivers/thunderbolt/icm.c ret = pcie2cio_write(icm, TB_CFG_PORT, port0, PHY_PORT_CS1, val0);
icm 1795 drivers/thunderbolt/icm.c return pcie2cio_write(icm, TB_CFG_PORT, port1, PHY_PORT_CS1, val1);
icm 1800 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1810 drivers/thunderbolt/icm.c if (icm->get_mode) {
icm 1811 drivers/thunderbolt/icm.c ret = icm->get_mode(tb);
icm 1815 drivers/thunderbolt/icm.c icm->safe_mode = true;
icm 1848 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1855 drivers/thunderbolt/icm.c if (icm->safe_mode) {
icm 1863 drivers/thunderbolt/icm.c &icm->rpm);
icm 1871 drivers/thunderbolt/icm.c if (tb->nboot_acl > icm->max_boot_acl)
icm 1879 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1881 drivers/thunderbolt/icm.c if (icm->save_devices)
icm 1882 drivers/thunderbolt/icm.c icm->save_devices(tb);
icm 1960 drivers/thunderbolt/icm.c struct icm *icm = container_of(work, struct icm, rescan_work.work);
icm 1961 drivers/thunderbolt/icm.c struct tb *tb = icm_to_tb(icm);
icm 1971 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 1996 drivers/thunderbolt/icm.c queue_delayed_work(tb->wq, &icm->rescan_work, msecs_to_jiffies(500));
icm 2035 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 2038 drivers/thunderbolt/icm.c if (icm->safe_mode)
icm 2045 drivers/thunderbolt/icm.c tb->root_switch->no_nvm_upgrade = !icm->can_upgrade_nvm;
icm 2046 drivers/thunderbolt/icm.c tb->root_switch->rpm = icm->rpm;
icm 2048 drivers/thunderbolt/icm.c if (icm->set_uuid)
icm 2049 drivers/thunderbolt/icm.c icm->set_uuid(tb);
icm 2062 drivers/thunderbolt/icm.c struct icm *icm = tb_priv(tb);
icm 2064 drivers/thunderbolt/icm.c cancel_delayed_work(&icm->rescan_work);
icm 2150 drivers/thunderbolt/icm.c struct icm *icm;
icm 2153 drivers/thunderbolt/icm.c tb = tb_domain_alloc(nhi, sizeof(struct icm));
icm 2157 drivers/thunderbolt/icm.c icm = tb_priv(tb);
icm 2158 drivers/thunderbolt/icm.c INIT_DELAYED_WORK(&icm->rescan_work, icm_rescan_work);
icm 2159 drivers/thunderbolt/icm.c mutex_init(&icm->request_lock);
icm 2164 drivers/thunderbolt/icm.c icm->can_upgrade_nvm = true;
icm 2165 drivers/thunderbolt/icm.c icm->is_supported = icm_fr_is_supported;
icm 2166 drivers/thunderbolt/icm.c icm->get_route = icm_fr_get_route;
icm 2167 drivers/thunderbolt/icm.c icm->save_devices = icm_fr_save_devices;
icm 2168 drivers/thunderbolt/icm.c icm->driver_ready = icm_fr_driver_ready;
icm 2169 drivers/thunderbolt/icm.c icm->device_connected = icm_fr_device_connected;
icm 2170 drivers/thunderbolt/icm.c icm->device_disconnected = icm_fr_device_disconnected;
icm 2171 drivers/thunderbolt/icm.c icm->xdomain_connected = icm_fr_xdomain_connected;
icm 2172 drivers/thunderbolt/icm.c icm->xdomain_disconnected = icm_fr_xdomain_disconnected;
icm 2181 drivers/thunderbolt/icm.c icm->max_boot_acl = ICM_AR_PREBOOT_ACL_ENTRIES;
icm 2188 drivers/thunderbolt/icm.c icm->can_upgrade_nvm = !x86_apple_machine;
icm 2189 drivers/thunderbolt/icm.c icm->is_supported = icm_ar_is_supported;
icm 2190 drivers/thunderbolt/icm.c icm->cio_reset = icm_ar_cio_reset;
icm 2191 drivers/thunderbolt/icm.c icm->get_mode = icm_ar_get_mode;
icm 2192 drivers/thunderbolt/icm.c icm->get_route = icm_ar_get_route;
icm 2193 drivers/thunderbolt/icm.c icm->save_devices = icm_fr_save_devices;
icm 2194 drivers/thunderbolt/icm.c icm->driver_ready = icm_ar_driver_ready;
icm 2195 drivers/thunderbolt/icm.c icm->device_connected = icm_fr_device_connected;
icm 2196 drivers/thunderbolt/icm.c icm->device_disconnected = icm_fr_device_disconnected;
icm 2197 drivers/thunderbolt/icm.c icm->xdomain_connected = icm_fr_xdomain_connected;
icm 2198 drivers/thunderbolt/icm.c icm->xdomain_disconnected = icm_fr_xdomain_disconnected;
icm 2204 drivers/thunderbolt/icm.c icm->max_boot_acl = ICM_AR_PREBOOT_ACL_ENTRIES;
icm 2205 drivers/thunderbolt/icm.c icm->can_upgrade_nvm = !x86_apple_machine;
icm 2206 drivers/thunderbolt/icm.c icm->is_supported = icm_ar_is_supported;
icm 2207 drivers/thunderbolt/icm.c icm->cio_reset = icm_tr_cio_reset;
icm 2208 drivers/thunderbolt/icm.c icm->get_mode = icm_ar_get_mode;
icm 2209 drivers/thunderbolt/icm.c icm->driver_ready = icm_tr_driver_ready;
icm 2210 drivers/thunderbolt/icm.c icm->device_connected = icm_tr_device_connected;
icm 2211 drivers/thunderbolt/icm.c icm->device_disconnected = icm_tr_device_disconnected;
icm 2212 drivers/thunderbolt/icm.c icm->xdomain_connected = icm_tr_xdomain_connected;
icm 2213 drivers/thunderbolt/icm.c icm->xdomain_disconnected = icm_tr_xdomain_disconnected;
icm 2219 drivers/thunderbolt/icm.c icm->is_supported = icm_ar_is_supported;
icm 2220 drivers/thunderbolt/icm.c icm->driver_ready = icm_icl_driver_ready;
icm 2221 drivers/thunderbolt/icm.c icm->set_uuid = icm_icl_set_uuid;
icm 2222 drivers/thunderbolt/icm.c icm->device_connected = icm_icl_device_connected;
icm 2223 drivers/thunderbolt/icm.c icm->device_disconnected = icm_tr_device_disconnected;
icm 2224 drivers/thunderbolt/icm.c icm->xdomain_connected = icm_tr_xdomain_connected;
icm 2225 drivers/thunderbolt/icm.c icm->xdomain_disconnected = icm_tr_xdomain_disconnected;
icm 2226 drivers/thunderbolt/icm.c icm->rtd3_veto = icm_icl_rtd3_veto;
icm 2231 drivers/thunderbolt/icm.c if (!icm->is_supported || !icm->is_supported(tb)) {
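
The drivers/thunderbolt/icm.c entries above show the driver filling a per-generation ops table in struct icm (icm_fr_*, icm_ar_*, icm_tr_*, icm_icl_* callbacks) during setup and later dispatching through those function pointers. The following is a minimal, standalone sketch of that pattern only; the struct layout, the fake_* helpers and icm_setup_fr are simplified assumptions made up for illustration, not the kernel definitions.

/* Standalone sketch (not kernel code) of the callback-table pattern
 * illustrated by the drivers/thunderbolt/icm.c references above. */
#include <stdbool.h>
#include <stdio.h>

struct tb;	/* opaque domain handle, stand-in for the kernel's struct tb */

/* Simplified ops table; the real struct icm carries more state
 * (upstream_port, vnd_cap, rescan_work, request_lock, ...). */
struct icm {
	bool can_upgrade_nvm;
	bool (*is_supported)(struct tb *tb);
	void (*save_devices)(struct tb *tb);
};

/* Hypothetical per-generation implementations standing in for icm_fr_*. */
static bool fake_fr_is_supported(struct tb *tb)
{
	(void)tb;
	return true;
}

static void fake_fr_save_devices(struct tb *tb)
{
	(void)tb;
	puts("FR: save devices");
}

/* Mirrors the icm->field = icm_fr_* assignments listed above. */
static void icm_setup_fr(struct icm *icm)
{
	icm->can_upgrade_nvm = true;
	icm->is_supported = fake_fr_is_supported;
	icm->save_devices = fake_fr_save_devices;
}

int main(void)
{
	struct icm icm = { 0 };
	struct tb *tb = NULL;	/* the kernel code obtains this via tb_priv(tb) */

	icm_setup_fr(&icm);

	/* Dispatch through the ops table, guarding optional callbacks the
	 * same way icm.c checks icm->save_devices before calling it. */
	if (icm.is_supported && icm.is_supported(tb) && icm.save_devices)
		icm.save_devices(tb);

	return 0;
}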