cur_dev  490 drivers/ide/ide-io.c            ide_drive_t *cur_dev =
cur_dev  491 drivers/ide/ide-io.c                    prev_port ? prev_port->cur_dev : NULL;
cur_dev  497 drivers/ide/ide-io.c            if (cur_dev &&
cur_dev  498 drivers/ide/ide-io.c                (cur_dev->dev_flags & IDE_DFLAG_NIEN_QUIRK) == 0)
cur_dev  505 drivers/ide/ide-io.c            hwif->cur_dev = drive;
cur_dev  644 drivers/ide/ide-io.c            drive = hwif->cur_dev;
cur_dev  830 drivers/ide/ide-io.c            drive = hwif->cur_dev;
cur_dev  812 drivers/infiniband/core/cma.c  struct cma_device *cma_dev, *cur_dev;
cur_dev  826 drivers/infiniband/core/cma.c  list_for_each_entry(cur_dev, &dev_list, list) {
cur_dev  827 drivers/infiniband/core/cma.c          for (p = 1; p <= cur_dev->device->phys_port_cnt; ++p) {
cur_dev  828 drivers/infiniband/core/cma.c                  if (!rdma_cap_af_ib(cur_dev->device, p))
cur_dev  831 drivers/infiniband/core/cma.c                  if (ib_find_cached_pkey(cur_dev->device, p, pkey, &index))
cur_dev  834 drivers/infiniband/core/cma.c                  if (ib_get_cached_port_state(cur_dev->device, p, &port_state))
cur_dev  836 drivers/infiniband/core/cma.c                  for (i = 0; !rdma_query_gid(cur_dev->device,
cur_dev  840 drivers/infiniband/core/cma.c                          cma_dev = cur_dev;
cur_dev  849 drivers/infiniband/core/cma.c                          cma_dev = cur_dev;
cur_dev 2969 drivers/infiniband/core/cma.c  struct cma_device *cma_dev, *cur_dev;
cur_dev 2978 drivers/infiniband/core/cma.c  list_for_each_entry(cur_dev, &dev_list, list) {
cur_dev 2980 drivers/infiniband/core/cma.c              !rdma_cap_ib_cm(cur_dev->device, 1))
cur_dev 2984 drivers/infiniband/core/cma.c          cma_dev = cur_dev;
cur_dev 2986 drivers/infiniband/core/cma.c          for (p = 1; p <= cur_dev->device->phys_port_cnt; ++p) {
cur_dev 2987 drivers/infiniband/core/cma.c                  if (!ib_get_cached_port_state(cur_dev->device, p, &port_state) &&
cur_dev 2989 drivers/infiniband/core/cma.c                          cma_dev = cur_dev;
cur_dev  805 include/linux/ide.h            ide_drive_t *cur_dev;
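
The drivers/infiniband/core/cma.c hits above all follow the same cursor pattern: cur_dev walks dev_list via list_for_each_entry(), each port of the current device is probed, and the first suitable device is remembered in cma_dev. The following is a minimal kernel-style sketch of that pattern only, not the actual cma.c code; struct example_dev, example_list and port_is_usable() are hypothetical stand-ins, and only list_for_each_entry() / LIST_HEAD() come from <linux/list.h>.

/*
 * Sketch of the cur_dev cursor pattern seen in the cma.c entries.
 * All example_* names and port_is_usable() are hypothetical.
 */
#include <linux/list.h>
#include <linux/types.h>

struct example_dev {
	struct list_head list;		/* linked into example_list */
	unsigned int	 port_cnt;	/* number of ports to probe */
};

static LIST_HEAD(example_list);

/* Hypothetical per-port suitability check (stand-in for the rdma_cap_*()
 * and cached-state checks in the real code). */
static bool port_is_usable(struct example_dev *dev, unsigned int port)
{
	return port <= dev->port_cnt;
}

static struct example_dev *pick_example_dev(void)
{
	struct example_dev *cur_dev, *found = NULL;
	unsigned int p;

	list_for_each_entry(cur_dev, &example_list, list) {
		for (p = 1; p <= cur_dev->port_cnt; ++p) {
			if (port_is_usable(cur_dev, p))
				return cur_dev;	/* exact match: stop here */
		}
		if (!found)
			found = cur_dev;	/* remember a fallback candidate */
	}
	return found;
}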