ldev 532 arch/arm/common/locomo.c static inline struct locomo *locomo_chip_driver(struct locomo_dev *ldev) ldev 534 arch/arm/common/locomo.c return (struct locomo *)dev_get_drvdata(ldev->dev.parent); ldev 659 arch/arm/common/locomo.c void locomo_m62332_senddata(struct locomo_dev *ldev, unsigned int dac_data, int channel) ldev 661 arch/arm/common/locomo.c struct locomo *lchip = locomo_chip_driver(ldev); ldev 828 arch/arm/common/locomo.c struct locomo_dev *ldev = LOCOMO_DEV(dev); ldev 833 arch/arm/common/locomo.c ret = drv->probe(ldev); ldev 839 arch/arm/common/locomo.c struct locomo_dev *ldev = LOCOMO_DEV(dev); ldev 844 arch/arm/common/locomo.c ret = drv->remove(ldev); ldev 210 arch/arm/include/asm/hardware/locomo.h void locomo_m62332_senddata(struct locomo_dev *ldev, unsigned int dac_data, int channel); ldev 50 drivers/acpi/proc.c struct device *ldev; ldev 53 drivers/acpi/proc.c ldev = get_device(entry->dev); ldev 54 drivers/acpi/proc.c if (!ldev) ldev 64 drivers/acpi/proc.c device_may_wakeup(ldev)) ? ldev 66 drivers/acpi/proc.c ldev->bus ? ldev->bus->name : ldev 67 drivers/acpi/proc.c "no-bus", dev_name(ldev)); ldev 68 drivers/acpi/proc.c put_device(ldev); ldev 151 drivers/block/drbd/drbd_actlog.c if (op != REQ_OP_WRITE && device->state.disk == D_DISKLESS && device->ldev == NULL) ldev 300 drivers/block/drbd/drbd_actlog.c const unsigned int stripes = device->ldev->md.al_stripes; ldev 301 drivers/block/drbd/drbd_actlog.c const unsigned int stripe_size_4kB = device->ldev->md.al_stripe_size_4k; ldev 304 drivers/block/drbd/drbd_actlog.c unsigned int t = device->al_tr_number % (device->ldev->md.al_size_4k); ldev 313 drivers/block/drbd/drbd_actlog.c return device->ldev->md.md_offset + device->ldev->md.al_offset + t; ldev 386 drivers/block/drbd/drbd_actlog.c write_al_updates = rcu_dereference(device->ldev->disk_conf)->al_updates; ldev 389 drivers/block/drbd/drbd_actlog.c if (drbd_md_sync_page_io(device, device->ldev, sector, WRITE)) { ldev 457 drivers/block/drbd/drbd_actlog.c write_al_updates = rcu_dereference(device->ldev->disk_conf)->al_updates; ldev 611 drivers/block/drbd/drbd_actlog.c struct drbd_md *md = &device->ldev->md; ldev 614 drivers/block/drbd/drbd_bitmap.c static u64 drbd_md_on_disk_bits(struct drbd_backing_dev *ldev) ldev 617 drivers/block/drbd/drbd_bitmap.c if (ldev->md.al_offset == 8) ldev 618 drivers/block/drbd/drbd_bitmap.c bitmap_sectors = ldev->md.md_size_sect - ldev->md.bm_offset; ldev 620 drivers/block/drbd/drbd_bitmap.c bitmap_sectors = ldev->md.al_offset - ldev->md.bm_offset; ldev 678 drivers/block/drbd/drbd_bitmap.c u64 bits_on_disk = drbd_md_on_disk_bits(device->ldev); ldev 989 drivers/block/drbd/drbd_bitmap.c device->ldev->md.md_offset + device->ldev->md.bm_offset; ldev 996 drivers/block/drbd/drbd_bitmap.c (drbd_md_last_sector(device->ldev) - on_disk_sector + 1)<<9); ldev 1011 drivers/block/drbd/drbd_bitmap.c bio_set_dev(bio, device->ldev->md_bdev); ldev 1144 drivers/block/drbd/drbd_bitmap.c wait_until_done_or_force_detached(device, device->ldev, &ctx->done); ldev 727 drivers/block/drbd/drbd_debugfs.c md = &device->ldev->md; ldev 840 drivers/block/drbd/drbd_int.h struct drbd_backing_dev *ldev __protected_by(local); ldev 1121 drivers/block/drbd/drbd_int.h extern void drbd_backing_dev_free(struct drbd_device *device, struct drbd_backing_dev *ldev); ldev 1753 drivers/block/drbd/drbd_int.h ep = rcu_dereference(device->ldev->disk_conf)->on_io_error; ldev 748 drivers/block/drbd/drbd_main.c dc = rcu_dereference(peer_device->device->ldev->disk_conf); ldev 843 
drivers/block/drbd/drbd_main.c spin_lock_irq(&device->ldev->md.uuid_lock); ldev 845 drivers/block/drbd/drbd_main.c p->uuid[i] = cpu_to_be64(device->ldev->md.uuid[i]); ldev 846 drivers/block/drbd/drbd_main.c spin_unlock_irq(&device->ldev->md.uuid_lock); ldev 874 drivers/block/drbd/drbd_main.c u64 *uuid = device->ldev->md.uuid; ldev 898 drivers/block/drbd/drbd_main.c uuid = device->ldev->md.uuid[UI_BITMAP]; ldev 961 drivers/block/drbd/drbd_main.c struct request_queue *q = bdev_get_queue(device->ldev->backing_bdev); ldev 962 drivers/block/drbd/drbd_main.c d_size = drbd_get_max_capacity(device->ldev); ldev 964 drivers/block/drbd/drbd_main.c u_size = rcu_dereference(device->ldev->disk_conf)->disk_size; ldev 1277 drivers/block/drbd/drbd_main.c if (drbd_md_test_flag(device->ldev, MDF_FULL_SYNC)) { ldev 2078 drivers/block/drbd/drbd_main.c drbd_backing_dev_free(device, device->ldev); ldev 2079 drivers/block/drbd/drbd_main.c device->ldev = NULL; ldev 2241 drivers/block/drbd/drbd_main.c drbd_backing_dev_free(device, device->ldev); ldev 2242 drivers/block/drbd/drbd_main.c device->ldev = NULL; ldev 2455 drivers/block/drbd/drbd_main.c q = bdev_get_queue(device->ldev->backing_bdev); ldev 3109 drivers/block/drbd/drbd_main.c buffer->uuid[i] = cpu_to_be64(device->ldev->md.uuid[i]); ldev 3110 drivers/block/drbd/drbd_main.c buffer->flags = cpu_to_be32(device->ldev->md.flags); ldev 3113 drivers/block/drbd/drbd_main.c buffer->md_size_sect = cpu_to_be32(device->ldev->md.md_size_sect); ldev 3114 drivers/block/drbd/drbd_main.c buffer->al_offset = cpu_to_be32(device->ldev->md.al_offset); ldev 3117 drivers/block/drbd/drbd_main.c buffer->device_uuid = cpu_to_be64(device->ldev->md.device_uuid); ldev 3119 drivers/block/drbd/drbd_main.c buffer->bm_offset = cpu_to_be32(device->ldev->md.bm_offset); ldev 3122 drivers/block/drbd/drbd_main.c buffer->al_stripes = cpu_to_be32(device->ldev->md.al_stripes); ldev 3123 drivers/block/drbd/drbd_main.c buffer->al_stripe_size_4k = cpu_to_be32(device->ldev->md.al_stripe_size_4k); ldev 3125 drivers/block/drbd/drbd_main.c D_ASSERT(device, drbd_md_ss(device->ldev) == device->ldev->md.md_offset); ldev 3126 drivers/block/drbd/drbd_main.c sector = device->ldev->md.md_offset; ldev 3128 drivers/block/drbd/drbd_main.c if (drbd_md_sync_page_io(device, device->ldev, sector, REQ_OP_WRITE)) { ldev 3165 drivers/block/drbd/drbd_main.c device->ldev->md.la_size_sect = drbd_get_capacity(device->this_bdev); ldev 3439 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[i+1] = device->ldev->md.uuid[i]; ldev 3453 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[idx] = val; ldev 3460 drivers/block/drbd/drbd_main.c spin_lock_irqsave(&device->ldev->md.uuid_lock, flags); ldev 3462 drivers/block/drbd/drbd_main.c spin_unlock_irqrestore(&device->ldev->md.uuid_lock, flags); ldev 3468 drivers/block/drbd/drbd_main.c spin_lock_irqsave(&device->ldev->md.uuid_lock, flags); ldev 3469 drivers/block/drbd/drbd_main.c if (device->ldev->md.uuid[idx]) { ldev 3471 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[UI_HISTORY_START] = device->ldev->md.uuid[idx]; ldev 3474 drivers/block/drbd/drbd_main.c spin_unlock_irqrestore(&device->ldev->md.uuid_lock, flags); ldev 3491 drivers/block/drbd/drbd_main.c spin_lock_irq(&device->ldev->md.uuid_lock); ldev 3492 drivers/block/drbd/drbd_main.c bm_uuid = device->ldev->md.uuid[UI_BITMAP]; ldev 3497 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[UI_BITMAP] = device->ldev->md.uuid[UI_CURRENT]; ldev 3499 drivers/block/drbd/drbd_main.c spin_unlock_irq(&device->ldev->md.uuid_lock); 
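
The drbd_main.c entries here show that the generation UUIDs of the local backing device live in device->ldev->md.uuid[] and are only read or rewritten while md.uuid_lock is held (spin_lock_irq() from plain process context, spin_lock_irqsave() where the caller's context is less certain). A minimal sketch of the read side, assuming the declarations from drivers/block/drbd/drbd_int.h and include/linux/drbd.h (UI_CURRENT/UI_SIZE, struct drbd_device); the helper name is invented:

/* Sketch only: snapshot the on-disk generation UUIDs of the local
 * backing device ("ldev") under md.uuid_lock, mirroring the
 * lock/copy/unlock pattern in the drbd_main.c lines above.  Not the
 * in-tree helper.
 */
static void sketch_drbd_uuid_snapshot(struct drbd_device *device,
				      u64 uuid[UI_SIZE])
{
	int i;

	spin_lock_irq(&device->ldev->md.uuid_lock);
	for (i = UI_CURRENT; i < UI_SIZE; i++)
		uuid[i] = device->ldev->md.uuid[i];
	spin_unlock_irq(&device->ldev->md.uuid_lock);
}
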
ldev 3509 drivers/block/drbd/drbd_main.c if (device->ldev->md.uuid[UI_BITMAP] == 0 && val == 0) ldev 3512 drivers/block/drbd/drbd_main.c spin_lock_irqsave(&device->ldev->md.uuid_lock, flags); ldev 3515 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[UI_HISTORY_START] = device->ldev->md.uuid[UI_BITMAP]; ldev 3516 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[UI_BITMAP] = 0; ldev 3518 drivers/block/drbd/drbd_main.c unsigned long long bm_uuid = device->ldev->md.uuid[UI_BITMAP]; ldev 3522 drivers/block/drbd/drbd_main.c device->ldev->md.uuid[UI_BITMAP] = val & ~((u64)1); ldev 3524 drivers/block/drbd/drbd_main.c spin_unlock_irqrestore(&device->ldev->md.uuid_lock, flags); ldev 3679 drivers/block/drbd/drbd_main.c if ((device->ldev->md.flags & flag) != flag) { ldev 3681 drivers/block/drbd/drbd_main.c device->ldev->md.flags |= flag; ldev 3687 drivers/block/drbd/drbd_main.c if ((device->ldev->md.flags & flag) != 0) { ldev 3689 drivers/block/drbd/drbd_main.c device->ldev->md.flags &= ~flag; ldev 455 drivers/block/drbd/drbd_nl.c rcu_dereference(peer_device->device->ldev->disk_conf); ldev 721 drivers/block/drbd/drbd_nl.c device->ldev->md.uuid[UI_CURRENT] &= ~(u64)1; ldev 734 drivers/block/drbd/drbd_nl.c && device->ldev->md.uuid[UI_BITMAP] == 0) || forced) ldev 737 drivers/block/drbd/drbd_nl.c device->ldev->md.uuid[UI_CURRENT] |= (u64)1; ldev 941 drivers/block/drbd/drbd_nl.c struct drbd_md *md = &device->ldev->md; ldev 978 drivers/block/drbd/drbd_nl.c drbd_md_set_sector_offsets(device, device->ldev); ldev 981 drivers/block/drbd/drbd_nl.c u_size = rcu_dereference(device->ldev->disk_conf)->disk_size; ldev 983 drivers/block/drbd/drbd_nl.c size = drbd_new_dev_size(device, device->ldev, u_size, flags & DDSF_FORCED); ldev 1348 drivers/block/drbd/drbd_nl.c dc = rcu_dereference(device->ldev->disk_conf); ldev 1604 drivers/block/drbd/drbd_nl.c old_disk_conf = device->ldev->disk_conf; ldev 1619 drivers/block/drbd/drbd_nl.c sanitize_disk_conf(device, new_disk_conf, device->ldev); ldev 1647 drivers/block/drbd/drbd_nl.c rcu_assign_pointer(device->ldev->disk_conf, new_disk_conf); ldev 1663 drivers/block/drbd/drbd_nl.c device->ldev->md.flags &= ~MDF_AL_DISABLED; ldev 1665 drivers/block/drbd/drbd_nl.c device->ldev->md.flags |= MDF_AL_DISABLED; ldev 1677 drivers/block/drbd/drbd_nl.c drbd_reconsider_queue_parameters(device, device->ldev, NULL); ldev 1778 drivers/block/drbd/drbd_nl.c void drbd_backing_dev_free(struct drbd_device *device, struct drbd_backing_dev *ldev) ldev 1780 drivers/block/drbd/drbd_nl.c if (ldev == NULL) ldev 1783 drivers/block/drbd/drbd_nl.c close_backing_dev(device, ldev->md_bdev, ldev->md_bdev != ldev->backing_bdev); ldev 1784 drivers/block/drbd/drbd_nl.c close_backing_dev(device, ldev->backing_bdev, true); ldev 1786 drivers/block/drbd/drbd_nl.c kfree(ldev->disk_conf); ldev 1787 drivers/block/drbd/drbd_nl.c kfree(ldev); ldev 2040 drivers/block/drbd/drbd_nl.c D_ASSERT(device, device->ldev == NULL); ldev 2041 drivers/block/drbd/drbd_nl.c device->ldev = nbc; ldev 2050 drivers/block/drbd/drbd_nl.c drbd_bump_write_ordering(device->resource, device->ldev, WO_BDEV_FLUSH); ldev 2053 drivers/block/drbd/drbd_nl.c if (drbd_md_test_flag(device->ldev, MDF_CRASHED_PRIMARY)) ldev 2058 drivers/block/drbd/drbd_nl.c if (drbd_md_test_flag(device->ldev, MDF_PRIMARY_IND) && ldev 2067 drivers/block/drbd/drbd_nl.c drbd_reconsider_queue_parameters(device, device->ldev, NULL); ldev 2085 drivers/block/drbd/drbd_nl.c drbd_md_test_flag(device->ldev, MDF_PRIMARY_IND) && ldev 2086 drivers/block/drbd/drbd_nl.c 
!drbd_md_test_flag(device->ldev, MDF_CONNECTED_IND)) ldev 2096 drivers/block/drbd/drbd_nl.c if (drbd_md_test_flag(device->ldev, MDF_FULL_SYNC) || ldev 2098 drivers/block/drbd/drbd_nl.c drbd_md_test_flag(device->ldev, MDF_AL_DISABLED))) { ldev 2125 drivers/block/drbd/drbd_nl.c if (drbd_md_test_flag(device->ldev, MDF_CONSISTENT)) { ldev 2126 drivers/block/drbd/drbd_nl.c if (drbd_md_test_flag(device->ldev, MDF_WAS_UP_TO_DATE)) ldev 2134 drivers/block/drbd/drbd_nl.c if (drbd_md_test_flag(device->ldev, MDF_PEER_OUT_DATED)) ldev 2139 drivers/block/drbd/drbd_nl.c (ns.pdsk == D_OUTDATED || rcu_dereference(device->ldev->disk_conf)->fencing == FP_DONT_CARE)) ldev 2147 drivers/block/drbd/drbd_nl.c if (rcu_dereference(device->ldev->disk_conf)->al_updates) ldev 2148 drivers/block/drbd/drbd_nl.c device->ldev->md.flags &= ~MDF_AL_DISABLED; ldev 2150 drivers/block/drbd/drbd_nl.c device->ldev->md.flags |= MDF_AL_DISABLED; ldev 2177 drivers/block/drbd/drbd_nl.c device->ldev->md.uuid[UI_CURRENT] |= (u64)1; ldev 2179 drivers/block/drbd/drbd_nl.c device->ldev->md.uuid[UI_CURRENT] &= ~(u64)1; ldev 2329 drivers/block/drbd/drbd_nl.c enum drbd_fencing_p fp = rcu_dereference(device->ldev->disk_conf)->fencing; ldev 2864 drivers/block/drbd/drbd_nl.c rs.al_stripes = device->ldev->md.al_stripes; ldev 2865 drivers/block/drbd/drbd_nl.c rs.al_stripe_size = device->ldev->md.al_stripe_size_4k * 4; ldev 2892 drivers/block/drbd/drbd_nl.c u_size = rcu_dereference(device->ldev->disk_conf)->disk_size; ldev 2902 drivers/block/drbd/drbd_nl.c if (device->ldev->md.al_stripes != rs.al_stripes || ldev 2903 drivers/block/drbd/drbd_nl.c device->ldev->md.al_stripe_size_4k != rs.al_stripe_size / 4) { ldev 2924 drivers/block/drbd/drbd_nl.c if (device->ldev->known_size != drbd_get_capacity(device->ldev->backing_bdev)) ldev 2925 drivers/block/drbd/drbd_nl.c device->ldev->known_size = drbd_get_capacity(device->ldev->backing_bdev); ldev 2929 drivers/block/drbd/drbd_nl.c old_disk_conf = device->ldev->disk_conf; ldev 2932 drivers/block/drbd/drbd_nl.c rcu_assign_pointer(device->ldev->disk_conf, new_disk_conf); ldev 3371 drivers/block/drbd/drbd_nl.c struct drbd_md *md = &device->ldev->md; ldev 3387 drivers/block/drbd/drbd_nl.c q = bdev_get_queue(device->ldev->backing_bdev); ldev 3476 drivers/block/drbd/drbd_nl.c rcu_dereference(device->ldev->disk_conf); ldev 3650 drivers/block/drbd/drbd_nl.c struct drbd_md *md = &device->ldev->md; ldev 3656 drivers/block/drbd/drbd_nl.c (drbd_md_test_flag(device->ldev, MDF_CONNECTED_IND) ? ldev 3658 drivers/block/drbd/drbd_nl.c (drbd_md_test_flag(device->ldev, MDF_CONSISTENT) && ldev 3659 drivers/block/drbd/drbd_nl.c !drbd_md_test_flag(device->ldev, MDF_WAS_UP_TO_DATE) ? ldev 3662 drivers/block/drbd/drbd_nl.c (drbd_md_test_flag(device->ldev, MDF_FULL_SYNC) ? 
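
In the drbd_nl.c entries here (and the drbd_receiver.c and drbd_worker.c ones further down), per-disk tunables are reached as rcu_dereference(device->ldev->disk_conf)->field under rcu_read_lock(), and a new configuration is published with rcu_assign_pointer(), typically followed by a grace period before the old object is freed, so readers on the I/O path never block. A hedged sketch of the reader side, with struct and field names taken from the listing and the helper name invented:

/* Sketch: read one tunable from the RCU-protected disk_conf attached to
 * the local backing device.  The writer path (see the drbd_nl.c lines
 * around 1604-1647 above) swaps the pointer with rcu_assign_pointer()
 * and frees the previous struct only after a grace period.
 */
static u64 sketch_drbd_user_disk_size(struct drbd_device *device)
{
	u64 u_size;

	rcu_read_lock();
	u_size = rcu_dereference(device->ldev->disk_conf)->disk_size;
	rcu_read_unlock();

	return u_size;
}
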
ldev 3814 drivers/block/drbd/drbd_nl.c disk_conf = rcu_dereference(device->ldev->disk_conf); ldev 3850 drivers/block/drbd/drbd_nl.c spin_lock_irq(&device->ldev->md.uuid_lock); ldev 3851 drivers/block/drbd/drbd_nl.c err = nla_put(skb, T_uuids, sizeof(si->uuids), device->ldev->md.uuid); ldev 3852 drivers/block/drbd/drbd_nl.c spin_unlock_irq(&device->ldev->md.uuid_lock); ldev 3857 drivers/block/drbd/drbd_nl.c if (nla_put_u32(skb, T_disk_flags, device->ldev->md.flags) || ldev 4217 drivers/block/drbd/drbd_nl.c device->ldev->md.uuid[UI_CURRENT] == UUID_JUST_CREATED && args.clear_bm) { ldev 1298 drivers/block/drbd/drbd_receiver.c bio_set_dev(bio, device->ldev->backing_bdev); ldev 1464 drivers/block/drbd/drbd_receiver.c wo = max_allowed_wo(device->ldev, wo); ldev 1465 drivers/block/drbd/drbd_receiver.c if (device->ldev == bdev) ldev 1513 drivers/block/drbd/drbd_receiver.c struct block_device *bdev = device->ldev->backing_bdev; ldev 1578 drivers/block/drbd/drbd_receiver.c struct request_queue *q = bdev_get_queue(device->ldev->backing_bdev); ldev 1586 drivers/block/drbd/drbd_receiver.c dc = rcu_dereference(device->ldev->disk_conf); ldev 1610 drivers/block/drbd/drbd_receiver.c struct block_device *bdev = device->ldev->backing_bdev; ldev 1696 drivers/block/drbd/drbd_receiver.c bio_set_dev(bio, device->ldev->backing_bdev); ldev 1903 drivers/block/drbd/drbd_receiver.c if (data_size != bdev_logical_block_size(device->ldev->backing_bdev)) { ldev 1905 drivers/block/drbd/drbd_receiver.c data_size, bdev_logical_block_size(device->ldev->backing_bdev)); ldev 2791 drivers/block/drbd/drbd_receiver.c struct gendisk *disk = device->ldev->backing_bdev->bd_contains->bd_disk; ldev 2797 drivers/block/drbd/drbd_receiver.c c_min_rate = rcu_dereference(device->ldev->disk_conf)->c_min_rate; ldev 3058 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[UI_BITMAP] & 1; ldev 3275 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[UI_CURRENT] & ~((u64)1); ldev 3295 drivers/block/drbd/drbd_receiver.c if (device->p_uuid[UI_BITMAP] == (u64)0 && device->ldev->md.uuid[UI_BITMAP] != (u64)0) { ldev 3300 drivers/block/drbd/drbd_receiver.c if ((device->ldev->md.uuid[UI_BITMAP] & ~((u64)1)) == (device->p_uuid[UI_HISTORY_START] & ~((u64)1)) && ldev 3301 drivers/block/drbd/drbd_receiver.c (device->ldev->md.uuid[UI_HISTORY_START] & ~((u64)1)) == (device->p_uuid[UI_HISTORY_START + 1] & ~((u64)1))) { ldev 3304 drivers/block/drbd/drbd_receiver.c device->ldev->md.uuid[UI_HISTORY_START] = device->ldev->md.uuid[UI_BITMAP]; ldev 3305 drivers/block/drbd/drbd_receiver.c device->ldev->md.uuid[UI_BITMAP] = 0; ldev 3307 drivers/block/drbd/drbd_receiver.c drbd_uuid_dump(device, "self", device->ldev->md.uuid, ldev 3318 drivers/block/drbd/drbd_receiver.c if (device->ldev->md.uuid[UI_BITMAP] == (u64)0 && device->p_uuid[UI_BITMAP] != (u64)0) { ldev 3323 drivers/block/drbd/drbd_receiver.c if ((device->ldev->md.uuid[UI_HISTORY_START] & ~((u64)1)) == (device->p_uuid[UI_BITMAP] & ~((u64)1)) && ldev 3324 drivers/block/drbd/drbd_receiver.c (device->ldev->md.uuid[UI_HISTORY_START + 1] & ~((u64)1)) == (device->p_uuid[UI_HISTORY_START] & ~((u64)1))) { ldev 3398 drivers/block/drbd/drbd_receiver.c (device->ldev->md.uuid[UI_HISTORY_START] & ~((u64)1)) == ldev 3418 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[UI_CURRENT] & ~((u64)1); ldev 3426 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[UI_BITMAP] & ~((u64)1); ldev 3432 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[UI_HISTORY_START] & 
~((u64)1); ldev 3435 drivers/block/drbd/drbd_receiver.c (device->ldev->md.uuid[UI_HISTORY_START + 1] & ~((u64)1)) == ldev 3437 drivers/block/drbd/drbd_receiver.c self + UUID_NEW_BM_OFFSET == (device->ldev->md.uuid[UI_BITMAP] & ~((u64)1))) { ldev 3444 drivers/block/drbd/drbd_receiver.c __drbd_uuid_set(device, UI_BITMAP, device->ldev->md.uuid[UI_HISTORY_START]); ldev 3445 drivers/block/drbd/drbd_receiver.c __drbd_uuid_set(device, UI_HISTORY_START, device->ldev->md.uuid[UI_HISTORY_START + 1]); ldev 3448 drivers/block/drbd/drbd_receiver.c drbd_uuid_dump(device, "self", device->ldev->md.uuid, ldev 3459 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[i] & ~((u64)1); ldev 3465 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[UI_BITMAP] & ~((u64)1); ldev 3472 drivers/block/drbd/drbd_receiver.c self = device->ldev->md.uuid[i] & ~((u64)1); ldev 3502 drivers/block/drbd/drbd_receiver.c spin_lock_irq(&device->ldev->md.uuid_lock); ldev 3503 drivers/block/drbd/drbd_receiver.c drbd_uuid_dump(device, "self", device->ldev->md.uuid, device->comm_bm_set, 0); ldev 3508 drivers/block/drbd/drbd_receiver.c spin_unlock_irq(&device->ldev->md.uuid_lock); ldev 3942 drivers/block/drbd/drbd_receiver.c old_disk_conf = device->ldev->disk_conf; ldev 4048 drivers/block/drbd/drbd_receiver.c rcu_assign_pointer(device->ldev->disk_conf, new_disk_conf); ldev 4131 drivers/block/drbd/drbd_receiver.c my_usize = rcu_dereference(device->ldev->disk_conf)->disk_size; ldev 4135 drivers/block/drbd/drbd_receiver.c p_size, drbd_get_max_capacity(device->ldev)); ldev 4147 drivers/block/drbd/drbd_receiver.c new_size = drbd_new_dev_size(device, device->ldev, p_usize, 0); ldev 4169 drivers/block/drbd/drbd_receiver.c old_disk_conf = device->ldev->disk_conf; ldev 4173 drivers/block/drbd/drbd_receiver.c rcu_assign_pointer(device->ldev->disk_conf, new_disk_conf); ldev 4193 drivers/block/drbd/drbd_receiver.c drbd_reconsider_queue_parameters(device, device->ldev, o); ldev 4245 drivers/block/drbd/drbd_receiver.c if (device->ldev->known_size != drbd_get_capacity(device->ldev->backing_bdev)) { ldev 4246 drivers/block/drbd/drbd_receiver.c device->ldev->known_size = drbd_get_capacity(device->ldev->backing_bdev); ldev 4315 drivers/block/drbd/drbd_receiver.c device->ldev->md.uuid[UI_CURRENT] == UUID_JUST_CREATED && ldev 537 drivers/block/drbd/drbd_req.c bdevname(device->ldev->backing_bdev, b)); ldev 927 drivers/block/drbd/drbd_req.c bdi = device->ldev->backing_bdev->bd_disk->queue->backing_dev_info; ldev 1071 drivers/block/drbd/drbd_req.c rbm = rcu_dereference(device->ldev->disk_conf)->read_balancing; ldev 1169 drivers/block/drbd/drbd_req.c bio_set_dev(bio, device->ldev->backing_bdev); ldev 1725 drivers/block/drbd/drbd_req.c dt = rcu_dereference(device->ldev->disk_conf)->disk_timeout * HZ / 10; ldev 834 drivers/block/drbd/drbd_state.c fp = rcu_dereference(device->ldev->disk_conf)->fencing; ldev 1064 drivers/block/drbd/drbd_state.c fp = rcu_dereference(device->ldev->disk_conf)->fencing; ldev 1093 drivers/block/drbd/drbd_state.c if (device->ed_uuid == device->ldev->md.uuid[UI_CURRENT]) { ldev 1411 drivers/block/drbd/drbd_state.c u32 mdf = device->ldev->md.flags & ~(MDF_CONSISTENT|MDF_PRIMARY_IND| ldev 1429 drivers/block/drbd/drbd_state.c if (mdf != device->ldev->md.flags) { ldev 1430 drivers/block/drbd/drbd_state.c device->ldev->md.flags = mdf; ldev 1434 drivers/block/drbd/drbd_state.c drbd_set_ed_uuid(device, device->ldev->md.uuid[UI_CURRENT]); ldev 1817 drivers/block/drbd/drbd_state.c device->ldev->md.uuid[UI_BITMAP] == 0 && 
ns.disk >= D_UP_TO_DATE) { ldev 1831 drivers/block/drbd/drbd_state.c device->ldev->md.uuid[UI_BITMAP] == 0 && ns.disk >= D_UP_TO_DATE) { ldev 1901 drivers/block/drbd/drbd_state.c if (device->ldev) { ldev 1903 drivers/block/drbd/drbd_state.c eh = rcu_dereference(device->ldev->disk_conf)->on_io_error; ldev 57 drivers/block/drbd/drbd_worker.c if (device->ldev) ldev 512 drivers/block/drbd/drbd_worker.c dc = rcu_dereference(device->ldev->disk_conf); ldev 566 drivers/block/drbd/drbd_worker.c device->c_sync_rate = rcu_dereference(device->ldev->disk_conf)->resync_rate; ldev 620 drivers/block/drbd/drbd_worker.c discard_granularity = rcu_dereference(device->ldev->disk_conf)->rs_discard_granularity; ldev 965 drivers/block/drbd/drbd_worker.c drbd_uuid_set(device, UI_BITMAP, device->ldev->md.uuid[UI_CURRENT]); ldev 982 drivers/block/drbd/drbd_worker.c device->p_uuid[i] = device->ldev->md.uuid[i]; ldev 999 drivers/block/drbd/drbd_worker.c fp = rcu_dereference(device->ldev->disk_conf)->fencing; ldev 1526 drivers/block/drbd/drbd_worker.c bio_set_dev(req->private_bio, device->ldev->backing_bdev); ldev 1538 drivers/block/drbd/drbd_worker.c if (!odev->ldev || odev->state.disk == D_DISKLESS) ldev 1541 drivers/block/drbd/drbd_worker.c resync_after = rcu_dereference(odev->ldev->disk_conf)->resync_after; ldev 1646 drivers/block/drbd/drbd_worker.c if (!odev || !odev->ldev || odev->state.disk == D_DISKLESS) ldev 1650 drivers/block/drbd/drbd_worker.c resync_after = rcu_dereference(odev->ldev->disk_conf)->resync_after; ldev 1674 drivers/block/drbd/drbd_worker.c struct gendisk *disk = device->ldev->backing_bdev->bd_contains->bd_disk; ldev 1938 drivers/block/drbd/drbd_worker.c drbd_backing_dev_free(device, device->ldev); ldev 1939 drivers/block/drbd/drbd_worker.c device->ldev = NULL; ldev 1967 drivers/block/drbd/drbd_worker.c if (device->bitmap && device->ldev) { ldev 52 drivers/cpuidle/governors/ladder.c struct ladder_device *ldev, ldev 55 drivers/cpuidle/governors/ladder.c ldev->states[old_idx].stats.promotion_count = 0; ldev 56 drivers/cpuidle/governors/ladder.c ldev->states[old_idx].stats.demotion_count = 0; ldev 69 drivers/cpuidle/governors/ladder.c struct ladder_device *ldev = this_cpu_ptr(&ladder_devices); ldev 77 drivers/cpuidle/governors/ladder.c ladder_do_selection(dev, ldev, last_idx, 0); ldev 81 drivers/cpuidle/governors/ladder.c last_state = &ldev->states[last_idx]; ldev 94 drivers/cpuidle/governors/ladder.c ladder_do_selection(dev, ldev, last_idx, last_idx + 1); ldev 110 drivers/cpuidle/governors/ladder.c ladder_do_selection(dev, ldev, last_idx, i); ldev 119 drivers/cpuidle/governors/ladder.c ladder_do_selection(dev, ldev, last_idx, last_idx - 1); ldev 138 drivers/cpuidle/governors/ladder.c struct ladder_device *ldev = &per_cpu(ladder_devices, dev->cpu); ldev 146 drivers/cpuidle/governors/ladder.c lstate = &ldev->states[i]; ldev 202 drivers/gpu/drm/lima/lima_device.c static void lima_fini_ip(struct lima_device *ldev, int index) ldev 205 drivers/gpu/drm/lima/lima_device.c struct lima_ip *ip = ldev->ip + index; ldev 290 drivers/gpu/drm/lima/lima_device.c int lima_device_init(struct lima_device *ldev) ldev 295 drivers/gpu/drm/lima/lima_device.c dma_set_coherent_mask(ldev->dev, DMA_BIT_MASK(32)); ldev 297 drivers/gpu/drm/lima/lima_device.c err = lima_clk_init(ldev); ldev 301 drivers/gpu/drm/lima/lima_device.c err = lima_regulator_init(ldev); ldev 305 drivers/gpu/drm/lima/lima_device.c ldev->empty_vm = lima_vm_create(ldev); ldev 306 drivers/gpu/drm/lima/lima_device.c if (!ldev->empty_vm) { ldev 311 
drivers/gpu/drm/lima/lima_device.c ldev->va_start = 0; ldev 312 drivers/gpu/drm/lima/lima_device.c if (ldev->id == lima_gpu_mali450) { ldev 313 drivers/gpu/drm/lima/lima_device.c ldev->va_end = LIMA_VA_RESERVE_START; ldev 314 drivers/gpu/drm/lima/lima_device.c ldev->dlbu_cpu = dma_alloc_wc( ldev 315 drivers/gpu/drm/lima/lima_device.c ldev->dev, LIMA_PAGE_SIZE, ldev 316 drivers/gpu/drm/lima/lima_device.c &ldev->dlbu_dma, GFP_KERNEL); ldev 317 drivers/gpu/drm/lima/lima_device.c if (!ldev->dlbu_cpu) { ldev 322 drivers/gpu/drm/lima/lima_device.c ldev->va_end = LIMA_VA_RESERVE_END; ldev 324 drivers/gpu/drm/lima/lima_device.c res = platform_get_resource(ldev->pdev, IORESOURCE_MEM, 0); ldev 325 drivers/gpu/drm/lima/lima_device.c ldev->iomem = devm_ioremap_resource(ldev->dev, res); ldev 326 drivers/gpu/drm/lima/lima_device.c if (IS_ERR(ldev->iomem)) { ldev 327 drivers/gpu/drm/lima/lima_device.c dev_err(ldev->dev, "fail to ioremap iomem\n"); ldev 328 drivers/gpu/drm/lima/lima_device.c err = PTR_ERR(ldev->iomem); ldev 333 drivers/gpu/drm/lima/lima_device.c err = lima_init_ip(ldev, i); ldev 338 drivers/gpu/drm/lima/lima_device.c err = lima_init_gp_pipe(ldev); ldev 342 drivers/gpu/drm/lima/lima_device.c err = lima_init_pp_pipe(ldev); ldev 346 drivers/gpu/drm/lima/lima_device.c dev_info(ldev->dev, "bus rate = %lu\n", clk_get_rate(ldev->clk_bus)); ldev 347 drivers/gpu/drm/lima/lima_device.c dev_info(ldev->dev, "mod rate = %lu", clk_get_rate(ldev->clk_gpu)); ldev 352 drivers/gpu/drm/lima/lima_device.c lima_fini_gp_pipe(ldev); ldev 355 drivers/gpu/drm/lima/lima_device.c lima_fini_ip(ldev, i); ldev 357 drivers/gpu/drm/lima/lima_device.c if (ldev->dlbu_cpu) ldev 358 drivers/gpu/drm/lima/lima_device.c dma_free_wc(ldev->dev, LIMA_PAGE_SIZE, ldev 359 drivers/gpu/drm/lima/lima_device.c ldev->dlbu_cpu, ldev->dlbu_dma); ldev 361 drivers/gpu/drm/lima/lima_device.c lima_vm_put(ldev->empty_vm); ldev 363 drivers/gpu/drm/lima/lima_device.c lima_regulator_fini(ldev); ldev 365 drivers/gpu/drm/lima/lima_device.c lima_clk_fini(ldev); ldev 369 drivers/gpu/drm/lima/lima_device.c void lima_device_fini(struct lima_device *ldev) ldev 373 drivers/gpu/drm/lima/lima_device.c lima_fini_pp_pipe(ldev); ldev 374 drivers/gpu/drm/lima/lima_device.c lima_fini_gp_pipe(ldev); ldev 377 drivers/gpu/drm/lima/lima_device.c lima_fini_ip(ldev, i); ldev 379 drivers/gpu/drm/lima/lima_device.c if (ldev->dlbu_cpu) ldev 380 drivers/gpu/drm/lima/lima_device.c dma_free_wc(ldev->dev, LIMA_PAGE_SIZE, ldev 381 drivers/gpu/drm/lima/lima_device.c ldev->dlbu_cpu, ldev->dlbu_dma); ldev 383 drivers/gpu/drm/lima/lima_device.c lima_vm_put(ldev->empty_vm); ldev 385 drivers/gpu/drm/lima/lima_device.c lima_regulator_fini(ldev); ldev 387 drivers/gpu/drm/lima/lima_device.c lima_clk_fini(ldev); ldev 105 drivers/gpu/drm/lima/lima_device.h int lima_device_init(struct lima_device *ldev); ldev 106 drivers/gpu/drm/lima/lima_device.h void lima_device_fini(struct lima_device *ldev); ldev 26 drivers/gpu/drm/lima/lima_drv.c struct lima_device *ldev = to_lima_dev(dev); ldev 33 drivers/gpu/drm/lima/lima_drv.c switch (ldev->id) { ldev 47 drivers/gpu/drm/lima/lima_drv.c args->value = ldev->pipe[lima_pipe_pp].num_processor; ldev 51 drivers/gpu/drm/lima/lima_drv.c args->value = ldev->gp_version; ldev 55 drivers/gpu/drm/lima/lima_drv.c args->value = ldev->pp_version; ldev 91 drivers/gpu/drm/lima/lima_drv.c struct lima_device *ldev = to_lima_dev(dev); ldev 107 drivers/gpu/drm/lima/lima_drv.c pipe = ldev->pipe + args->pipe; ldev 179 drivers/gpu/drm/lima/lima_drv.c struct lima_device 
*ldev = to_lima_dev(dev); ldev 184 drivers/gpu/drm/lima/lima_drv.c return lima_ctx_create(ldev, &priv->ctx_mgr, &args->id); ldev 202 drivers/gpu/drm/lima/lima_drv.c struct lima_device *ldev = to_lima_dev(dev); ldev 208 drivers/gpu/drm/lima/lima_drv.c priv->vm = lima_vm_create(ldev); ldev 281 drivers/gpu/drm/lima/lima_drv.c struct lima_device *ldev; ldev 289 drivers/gpu/drm/lima/lima_drv.c ldev = devm_kzalloc(&pdev->dev, sizeof(*ldev), GFP_KERNEL); ldev 290 drivers/gpu/drm/lima/lima_drv.c if (!ldev) { ldev 295 drivers/gpu/drm/lima/lima_drv.c ldev->pdev = pdev; ldev 296 drivers/gpu/drm/lima/lima_drv.c ldev->dev = &pdev->dev; ldev 297 drivers/gpu/drm/lima/lima_drv.c ldev->id = (enum lima_gpu_id)of_device_get_match_data(&pdev->dev); ldev 299 drivers/gpu/drm/lima/lima_drv.c platform_set_drvdata(pdev, ldev); ldev 306 drivers/gpu/drm/lima/lima_drv.c ddev->dev_private = ldev; ldev 307 drivers/gpu/drm/lima/lima_drv.c ldev->ddev = ddev; ldev 309 drivers/gpu/drm/lima/lima_drv.c err = lima_device_init(ldev); ldev 324 drivers/gpu/drm/lima/lima_drv.c lima_device_fini(ldev); ldev 334 drivers/gpu/drm/lima/lima_drv.c struct lima_device *ldev = platform_get_drvdata(pdev); ldev 335 drivers/gpu/drm/lima/lima_drv.c struct drm_device *ddev = ldev->ddev; ldev 338 drivers/gpu/drm/lima/lima_drv.c lima_device_fini(ldev); ldev 25 drivers/gpu/drm/lima/lima_gem.c struct lima_device *ldev = to_lima_dev(dev); ldev 27 drivers/gpu/drm/lima/lima_gem.c bo = lima_bo_create(ldev, size, flags, NULL); ldev 18 drivers/gpu/drm/lima/lima_gem_prime.c struct lima_device *ldev = to_lima_dev(dev); ldev 21 drivers/gpu/drm/lima/lima_gem_prime.c bo = lima_bo_create(ldev, attach->dmabuf->size, 0, sgt); ldev 82 drivers/gpu/drm/stm/drv.c struct ltdc_device *ldev; ldev 87 drivers/gpu/drm/stm/drv.c ldev = devm_kzalloc(ddev->dev, sizeof(*ldev), GFP_KERNEL); ldev 88 drivers/gpu/drm/stm/drv.c if (!ldev) ldev 91 drivers/gpu/drm/stm/drv.c ddev->dev_private = (void *)ldev; ldev 133 drivers/gpu/drm/stm/drv.c struct ltdc_device *ldev = ddev->dev_private; ldev 136 drivers/gpu/drm/stm/drv.c WARN_ON(ldev->suspend_state); ldev 142 drivers/gpu/drm/stm/drv.c ldev->suspend_state = state; ldev 151 drivers/gpu/drm/stm/drv.c struct ltdc_device *ldev = ddev->dev_private; ldev 154 drivers/gpu/drm/stm/drv.c if (WARN_ON(!ldev->suspend_state)) ldev 158 drivers/gpu/drm/stm/drv.c ret = drm_atomic_helper_resume(ddev, ldev->suspend_state); ldev 162 drivers/gpu/drm/stm/drv.c ldev->suspend_state = NULL; ldev 56 drivers/gpu/drm/stm/ltdc.c #define REG_OFS (ldev->caps.reg_ofs) ldev 370 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 374 drivers/gpu/drm/stm/ltdc.c if (ldev->irq_status & ISR_LIF) ldev 378 drivers/gpu/drm/stm/ltdc.c mutex_lock(&ldev->err_lock); ldev 379 drivers/gpu/drm/stm/ltdc.c if (ldev->irq_status & ISR_FUIF) ldev 380 drivers/gpu/drm/stm/ltdc.c ldev->error_status |= ISR_FUIF; ldev 381 drivers/gpu/drm/stm/ltdc.c if (ldev->irq_status & ISR_TERRIF) ldev 382 drivers/gpu/drm/stm/ltdc.c ldev->error_status |= ISR_TERRIF; ldev 383 drivers/gpu/drm/stm/ltdc.c mutex_unlock(&ldev->err_lock); ldev 391 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 394 drivers/gpu/drm/stm/ltdc.c ldev->irq_status = reg_read(ldev->regs, LTDC_ISR); ldev 395 drivers/gpu/drm/stm/ltdc.c reg_write(ldev->regs, LTDC_ICR, ldev->irq_status); ldev 406 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 419 drivers/gpu/drm/stm/ltdc.c reg_write(ldev->regs, LTDC_L1CLUTWR, val); ldev 426 drivers/gpu/drm/stm/ltdc.c 
struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 431 drivers/gpu/drm/stm/ltdc.c reg_write(ldev->regs, LTDC_BCCR, BCCR_BCBLACK); ldev 434 drivers/gpu/drm/stm/ltdc.c reg_set(ldev->regs, LTDC_IER, IER_RRIE | IER_FUIE | IER_TERRIE); ldev 437 drivers/gpu/drm/stm/ltdc.c reg_set(ldev->regs, LTDC_SRCR, SRCR_VBR); ldev 440 drivers/gpu/drm/stm/ltdc.c reg_set(ldev->regs, LTDC_GCR, GCR_LTDCEN); ldev 448 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 456 drivers/gpu/drm/stm/ltdc.c reg_clear(ldev->regs, LTDC_GCR, GCR_LTDCEN); ldev 459 drivers/gpu/drm/stm/ltdc.c reg_clear(ldev->regs, LTDC_IER, IER_RRIE | IER_FUIE | IER_TERRIE); ldev 462 drivers/gpu/drm/stm/ltdc.c reg_set(ldev->regs, LTDC_SRCR, SRCR_IMR); ldev 473 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 479 drivers/gpu/drm/stm/ltdc.c result = clk_round_rate(ldev->pixel_clk, target); ldev 484 drivers/gpu/drm/stm/ltdc.c if (result > ldev->caps.pad_max_freq_hz) ldev 512 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 523 drivers/gpu/drm/stm/ltdc.c if (clk_set_rate(ldev->pixel_clk, rate) < 0) { ldev 528 drivers/gpu/drm/stm/ltdc.c adjusted_mode->clock = clk_get_rate(ldev->pixel_clk) / 1000; ldev 546 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 596 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_GCR, ldev 601 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_SSCR, SSCR_VSH | SSCR_HSW, val); ldev 605 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_BPCR, BPCR_AVBP | BPCR_AHBP, val); ldev 609 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_AWCR, AWCR_AAW | AWCR_AAH, val); ldev 613 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_TWCR, TWCR_TOTALH | TWCR_TOTALW, val); ldev 615 drivers/gpu/drm/stm/ltdc.c reg_write(ldev->regs, LTDC_LIPCR, (accum_act_h + 1)); ldev 621 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 630 drivers/gpu/drm/stm/ltdc.c reg_set(ldev->regs, LTDC_SRCR, SRCR_VBR); ldev 655 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 658 drivers/gpu/drm/stm/ltdc.c reg_set(ldev->regs, LTDC_IER, IER_LIE); ldev 665 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = crtc_to_ltdc(crtc); ldev 668 drivers/gpu/drm/stm/ltdc.c reg_clear(ldev->regs, LTDC_IER, IER_LIE); ldev 676 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 697 drivers/gpu/drm/stm/ltdc.c line = reg_read(ldev->regs, LTDC_CPSR) & CPSR_CYPOS; ldev 698 drivers/gpu/drm/stm/ltdc.c vactive_start = reg_read(ldev->regs, LTDC_BPCR) & BPCR_AVBP; ldev 699 drivers/gpu/drm/stm/ltdc.c vactive_end = reg_read(ldev->regs, LTDC_AWCR) & AWCR_AAH; ldev 700 drivers/gpu/drm/stm/ltdc.c vtotal = reg_read(ldev->regs, LTDC_TWCR) & TWCR_TOTALH; ldev 761 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = plane_to_ltdc(plane); ldev 790 drivers/gpu/drm/stm/ltdc.c bpcr = reg_read(ldev->regs, LTDC_BPCR); ldev 796 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1WHPCR + lofs, ldev 801 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1WVPCR + lofs, ldev 807 drivers/gpu/drm/stm/ltdc.c if (ldev->caps.pix_fmt_hw[val] == pf) ldev 815 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1PFCR + lofs, LXPFCR_PF, val); ldev 820 drivers/gpu/drm/stm/ltdc.c (x1 - x0 + 1) + (ldev->caps.bus_width >> 3) - 1; ldev 822 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1CFBLR + lofs, ldev 827 drivers/gpu/drm/stm/ltdc.c 
reg_update_bits(ldev->regs, LTDC_L1CACR + lofs, LXCACR_CONSTA, val); ldev 835 drivers/gpu/drm/stm/ltdc.c if (ldev->caps.non_alpha_only_l1 && ldev 839 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1BFCR + lofs, ldev 844 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1CFBLNR + lofs, LXCFBLNR_CFBLN, val); ldev 850 drivers/gpu/drm/stm/ltdc.c reg_write(ldev->regs, LTDC_L1CFBAR + lofs, paddr); ldev 855 drivers/gpu/drm/stm/ltdc.c reg_update_bits(ldev->regs, LTDC_L1CR + lofs, ldev 858 drivers/gpu/drm/stm/ltdc.c ldev->plane_fpsi[plane->index].counter++; ldev 860 drivers/gpu/drm/stm/ltdc.c mutex_lock(&ldev->err_lock); ldev 861 drivers/gpu/drm/stm/ltdc.c if (ldev->error_status & ISR_FUIF) { ldev 863 drivers/gpu/drm/stm/ltdc.c ldev->error_status &= ~ISR_FUIF; ldev 865 drivers/gpu/drm/stm/ltdc.c if (ldev->error_status & ISR_TERRIF) { ldev 867 drivers/gpu/drm/stm/ltdc.c ldev->error_status &= ~ISR_TERRIF; ldev 869 drivers/gpu/drm/stm/ltdc.c mutex_unlock(&ldev->err_lock); ldev 875 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = plane_to_ltdc(plane); ldev 879 drivers/gpu/drm/stm/ltdc.c reg_clear(ldev->regs, LTDC_L1CR + lofs, LXCR_LEN); ldev 889 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = plane_to_ltdc(plane); ldev 890 drivers/gpu/drm/stm/ltdc.c struct fps_info *fpsi = &ldev->plane_fpsi[plane->index]; ldev 936 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 947 drivers/gpu/drm/stm/ltdc.c drm_fmt = to_drm_pixelformat(ldev->caps.pix_fmt_hw[i]); ldev 958 drivers/gpu/drm/stm/ltdc.c if (ldev->caps.non_alpha_only_l1 && ldev 993 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 1019 drivers/gpu/drm/stm/ltdc.c for (i = 1; i < ldev->caps.nb_layers; i++) { ldev 1071 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 1078 drivers/gpu/drm/stm/ltdc.c lcr = reg_read(ldev->regs, LTDC_LCR); ldev 1080 drivers/gpu/drm/stm/ltdc.c ldev->caps.nb_layers = clamp((int)lcr, 1, LTDC_MAX_LAYER); ldev 1083 drivers/gpu/drm/stm/ltdc.c gc2r = reg_read(ldev->regs, LTDC_GC2R); ldev 1085 drivers/gpu/drm/stm/ltdc.c ldev->caps.bus_width = 8 << bus_width_log2; ldev 1086 drivers/gpu/drm/stm/ltdc.c ldev->caps.hw_version = reg_read(ldev->regs, LTDC_IDR); ldev 1088 drivers/gpu/drm/stm/ltdc.c switch (ldev->caps.hw_version) { ldev 1091 drivers/gpu/drm/stm/ltdc.c ldev->caps.reg_ofs = REG_OFS_NONE; ldev 1092 drivers/gpu/drm/stm/ltdc.c ldev->caps.pix_fmt_hw = ltdc_pix_fmt_a0; ldev 1100 drivers/gpu/drm/stm/ltdc.c ldev->caps.non_alpha_only_l1 = true; ldev 1101 drivers/gpu/drm/stm/ltdc.c ldev->caps.pad_max_freq_hz = 90000000; ldev 1102 drivers/gpu/drm/stm/ltdc.c if (ldev->caps.hw_version == HWVER_10200) ldev 1103 drivers/gpu/drm/stm/ltdc.c ldev->caps.pad_max_freq_hz = 65000000; ldev 1106 drivers/gpu/drm/stm/ltdc.c ldev->caps.reg_ofs = REG_OFS_4; ldev 1107 drivers/gpu/drm/stm/ltdc.c ldev->caps.pix_fmt_hw = ltdc_pix_fmt_a1; ldev 1108 drivers/gpu/drm/stm/ltdc.c ldev->caps.non_alpha_only_l1 = false; ldev 1109 drivers/gpu/drm/stm/ltdc.c ldev->caps.pad_max_freq_hz = 150000000; ldev 1120 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 1123 drivers/gpu/drm/stm/ltdc.c clk_disable_unprepare(ldev->pixel_clk); ldev 1128 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 1133 drivers/gpu/drm/stm/ltdc.c ret = clk_prepare_enable(ldev->pixel_clk); ldev 1145 drivers/gpu/drm/stm/ltdc.c struct ltdc_device *ldev = ddev->dev_private; ldev 1177 drivers/gpu/drm/stm/ltdc.c 
mutex_init(&ldev->err_lock); ldev 1179 drivers/gpu/drm/stm/ltdc.c ldev->pixel_clk = devm_clk_get(dev, "lcd"); ldev 1180 drivers/gpu/drm/stm/ltdc.c if (IS_ERR(ldev->pixel_clk)) { ldev 1181 drivers/gpu/drm/stm/ltdc.c if (PTR_ERR(ldev->pixel_clk) != -EPROBE_DEFER) ldev 1183 drivers/gpu/drm/stm/ltdc.c return PTR_ERR(ldev->pixel_clk); ldev 1186 drivers/gpu/drm/stm/ltdc.c if (clk_prepare_enable(ldev->pixel_clk)) { ldev 1198 drivers/gpu/drm/stm/ltdc.c ldev->regs = devm_ioremap_resource(dev, res); ldev 1199 drivers/gpu/drm/stm/ltdc.c if (IS_ERR(ldev->regs)) { ldev 1201 drivers/gpu/drm/stm/ltdc.c ret = PTR_ERR(ldev->regs); ldev 1206 drivers/gpu/drm/stm/ltdc.c reg_clear(ldev->regs, LTDC_IER, ldev 1230 drivers/gpu/drm/stm/ltdc.c ldev->caps.hw_version); ldev 1234 drivers/gpu/drm/stm/ltdc.c DRM_DEBUG_DRIVER("ltdc hw version 0x%08x\n", ldev->caps.hw_version); ldev 1281 drivers/gpu/drm/stm/ltdc.c clk_disable_unprepare(ldev->pixel_clk); ldev 1290 drivers/gpu/drm/stm/ltdc.c clk_disable_unprepare(ldev->pixel_clk); ldev 78 drivers/hid/hid-led.c int (*init)(struct hidled_device *ldev); ldev 89 drivers/hid/hid-led.c struct hidled_device *ldev; ldev 113 drivers/hid/hid-led.c static int hidled_send(struct hidled_device *ldev, __u8 *buf) ldev 117 drivers/hid/hid-led.c mutex_lock(&ldev->lock); ldev 123 drivers/hid/hid-led.c memcpy(ldev->buf, buf, ldev->config->report_size); ldev 125 drivers/hid/hid-led.c if (ldev->config->report_type == RAW_REQUEST) ldev 126 drivers/hid/hid-led.c ret = hid_hw_raw_request(ldev->hdev, buf[0], ldev->buf, ldev 127 drivers/hid/hid-led.c ldev->config->report_size, ldev 130 drivers/hid/hid-led.c else if (ldev->config->report_type == OUTPUT_REPORT) ldev 131 drivers/hid/hid-led.c ret = hid_hw_output_report(ldev->hdev, ldev->buf, ldev 132 drivers/hid/hid-led.c ldev->config->report_size); ldev 136 drivers/hid/hid-led.c mutex_unlock(&ldev->lock); ldev 141 drivers/hid/hid-led.c return ret == ldev->config->report_size ? 
0 : -EMSGSIZE; ldev 145 drivers/hid/hid-led.c static int hidled_recv(struct hidled_device *ldev, __u8 *buf) ldev 149 drivers/hid/hid-led.c if (ldev->config->report_type != RAW_REQUEST) ldev 152 drivers/hid/hid-led.c mutex_lock(&ldev->lock); ldev 154 drivers/hid/hid-led.c memcpy(ldev->buf, buf, ldev->config->report_size); ldev 156 drivers/hid/hid-led.c ret = hid_hw_raw_request(ldev->hdev, buf[0], ldev->buf, ldev 157 drivers/hid/hid-led.c ldev->config->report_size, ldev 163 drivers/hid/hid-led.c ret = hid_hw_raw_request(ldev->hdev, buf[0], ldev->buf, ldev 164 drivers/hid/hid-led.c ldev->config->report_size, ldev 168 drivers/hid/hid-led.c memcpy(buf, ldev->buf, ldev->config->report_size); ldev 170 drivers/hid/hid-led.c mutex_unlock(&ldev->lock); ldev 197 drivers/hid/hid-led.c return hidled_send(rgb->ldev, buf); ldev 212 drivers/hid/hid-led.c return hidled_send(rgb->ldev, buf); ldev 215 drivers/hid/hid-led.c static int dream_cheeky_init(struct hidled_device *ldev) ldev 226 drivers/hid/hid-led.c return hidled_send(ldev, buf); ldev 240 drivers/hid/hid-led.c return hidled_send(led->rgb->ldev, buf); ldev 263 drivers/hid/hid-led.c static int thingm_init(struct hidled_device *ldev) ldev 268 drivers/hid/hid-led.c ret = hidled_recv(ldev, buf); ldev 274 drivers/hid/hid-led.c ldev->config = &hidled_config_thingm_v1; ldev 296 drivers/hid/hid-led.c return hidled_send(led->rgb->ldev, dp.data); ldev 306 drivers/hid/hid-led.c return hidled_send(led->rgb->ldev, dp.data); ldev 326 drivers/hid/hid-led.c static int delcom_init(struct hidled_device *ldev) ldev 331 drivers/hid/hid-led.c ret = hidled_recv(ldev, dp.data); ldev 351 drivers/hid/hid-led.c return hidled_send(led->rgb->ldev, buf); ldev 413 drivers/hid/hid-led.c const struct hidled_config *config = rgb->ldev->config; ldev 427 drivers/hid/hid-led.c return devm_led_classdev_register(&rgb->ldev->hdev->dev, &led->cdev); ldev 450 drivers/hid/hid-led.c struct hidled_device *ldev; ldev 454 drivers/hid/hid-led.c ldev = devm_kzalloc(&hdev->dev, sizeof(*ldev), GFP_KERNEL); ldev 455 drivers/hid/hid-led.c if (!ldev) ldev 458 drivers/hid/hid-led.c ldev->buf = devm_kmalloc(&hdev->dev, MAX_REPORT_SIZE, GFP_KERNEL); ldev 459 drivers/hid/hid-led.c if (!ldev->buf) ldev 466 drivers/hid/hid-led.c ldev->hdev = hdev; ldev 467 drivers/hid/hid-led.c mutex_init(&ldev->lock); ldev 469 drivers/hid/hid-led.c for (i = 0; !ldev->config && i < ARRAY_SIZE(hidled_configs); i++) ldev 471 drivers/hid/hid-led.c ldev->config = &hidled_configs[i]; ldev 473 drivers/hid/hid-led.c if (!ldev->config) ldev 476 drivers/hid/hid-led.c if (ldev->config->init) { ldev 477 drivers/hid/hid-led.c ret = ldev->config->init(ldev); ldev 482 drivers/hid/hid-led.c ldev->rgb = devm_kcalloc(&hdev->dev, ldev->config->num_leds, ldev 484 drivers/hid/hid-led.c if (!ldev->rgb) ldev 493 drivers/hid/hid-led.c for (i = 0; i < ldev->config->num_leds; i++) { ldev 494 drivers/hid/hid-led.c ldev->rgb[i].ldev = ldev; ldev 495 drivers/hid/hid-led.c ldev->rgb[i].num = i; ldev 496 drivers/hid/hid-led.c ret = hidled_init_rgb(&ldev->rgb[i], minor); ldev 503 drivers/hid/hid-led.c hid_info(hdev, "%s initialized\n", ldev->config->name); ldev 20 drivers/hid/hid-picolcd_lcd.c static int picolcd_get_contrast(struct lcd_device *ldev) ldev 22 drivers/hid/hid-picolcd_lcd.c struct picolcd_data *data = lcd_get_data(ldev); ldev 26 drivers/hid/hid-picolcd_lcd.c static int picolcd_set_contrast(struct lcd_device *ldev, int contrast) ldev 28 drivers/hid/hid-picolcd_lcd.c struct picolcd_data *data = lcd_get_data(ldev); ldev 44 
drivers/hid/hid-picolcd_lcd.c static int picolcd_check_lcd_fb(struct lcd_device *ldev, struct fb_info *fb) ldev 46 drivers/hid/hid-picolcd_lcd.c return fb && fb == picolcd_fbinfo((struct picolcd_data *)lcd_get_data(ldev)); ldev 58 drivers/hid/hid-picolcd_lcd.c struct lcd_device *ldev; ldev 68 drivers/hid/hid-picolcd_lcd.c ldev = lcd_device_register(dev_name(dev), dev, data, &picolcd_lcdops); ldev 69 drivers/hid/hid-picolcd_lcd.c if (IS_ERR(ldev)) { ldev 71 drivers/hid/hid-picolcd_lcd.c return PTR_ERR(ldev); ldev 73 drivers/hid/hid-picolcd_lcd.c ldev->props.max_contrast = 0x0ff; ldev 75 drivers/hid/hid-picolcd_lcd.c data->lcd = ldev; ldev 76 drivers/hid/hid-picolcd_lcd.c picolcd_set_contrast(ldev, 0xe5); ldev 82 drivers/hid/hid-picolcd_lcd.c struct lcd_device *ldev = data->lcd; ldev 85 drivers/hid/hid-picolcd_lcd.c lcd_device_unregister(ldev); ldev 62 drivers/hid/hid-u2fzero.c struct led_classdev ldev; /* Embedded struct for led */ ldev 149 drivers/hid/hid-u2fzero.c static int u2fzero_blink(struct led_classdev *ldev) ldev 151 drivers/hid/hid-u2fzero.c struct u2fzero_device *dev = container_of(ldev, ldev 152 drivers/hid/hid-u2fzero.c struct u2fzero_device, ldev); ldev 166 drivers/hid/hid-u2fzero.c static int u2fzero_brightness_set(struct led_classdev *ldev, ldev 169 drivers/hid/hid-u2fzero.c ldev->brightness = LED_OFF; ldev 171 drivers/hid/hid-u2fzero.c return u2fzero_blink(ldev); ldev 221 drivers/hid/hid-u2fzero.c dev->ldev.name = dev->led_name; ldev 222 drivers/hid/hid-u2fzero.c dev->ldev.max_brightness = LED_ON; ldev 223 drivers/hid/hid-u2fzero.c dev->ldev.flags = LED_HW_PLUGGABLE; ldev 224 drivers/hid/hid-u2fzero.c dev->ldev.brightness_set_blocking = u2fzero_brightness_set; ldev 226 drivers/hid/hid-u2fzero.c return devm_led_classdev_register(&dev->hdev->dev, &dev->ldev); ldev 238 drivers/infiniband/hw/i40iw/i40iw.h struct i40e_info *ldev; ldev 325 drivers/infiniband/hw/i40iw/i40iw.h struct i40e_info ldev; ldev 110 drivers/infiniband/hw/i40iw/i40iw_main.c static struct i40iw_handler *i40iw_find_i40e_handler(struct i40e_info *ldev) ldev 117 drivers/infiniband/hw/i40iw/i40iw_main.c if (hdl->ldev.netdev == ldev->netdev) { ldev 137 drivers/infiniband/hw/i40iw/i40iw_main.c if (hdl->ldev.netdev == netdev) { ldev 759 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev = iwdev->ldev; ldev 761 drivers/infiniband/hw/i40iw/i40iw_main.c ldev->ops->request_reset(ldev, iwdev->client, 1); ldev 774 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev) ldev 783 drivers/infiniband/hw/i40iw/i40iw_main.c if (ldev && ldev->ops && ldev->ops->setup_qvlist) { ldev 784 drivers/infiniband/hw/i40iw/i40iw_main.c status = ldev->ops->setup_qvlist(ldev, &i40iw_client, ldev 1312 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev) ldev 1349 drivers/infiniband/hw/i40iw/i40iw_main.c info.hmc_fn_id = ldev->fid; ldev 1350 drivers/infiniband/hw/i40iw/i40iw_main.c info.is_pf = (ldev->ftype) ? false : true; ldev 1351 drivers/infiniband/hw/i40iw/i40iw_main.c info.bar0 = ldev->hw_addr; ldev 1355 drivers/infiniband/hw/i40iw/i40iw_main.c (ldev->params.mtu) ? 
ldev->params.mtu : I40IW_DEFAULT_MTU; ldev 1357 drivers/infiniband/hw/i40iw/i40iw_main.c qset = ldev->params.qos.prio_qos[i].qs_handle; ldev 1379 drivers/infiniband/hw/i40iw/i40iw_main.c stats_info.fcn_id = ldev->fid; ldev 1428 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev) ldev 1436 drivers/infiniband/hw/i40iw/i40iw_main.c if (!ldev->msix_count) { ldev 1441 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->msix_count = ldev->msix_count; ldev 1457 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->iw_msixtbl[i].idx = ldev->msix_entries[i].entry; ldev 1458 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->iw_msixtbl[i].irq = ldev->msix_entries[i].vector; ldev 1485 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev = iwdev->ldev; ldev 1543 drivers/infiniband/hw/i40iw/i40iw_main.c i40iw_del_handler(i40iw_find_i40e_handler(ldev)); ldev 1558 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev, ldev 1565 drivers/infiniband/hw/i40iw/i40iw_main.c memcpy(&hdl->ldev, ldev, sizeof(*ldev)); ldev 1574 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->netdev = ldev->netdev; ldev 1576 drivers/infiniband/hw/i40iw/i40iw_main.c if (!ldev->ftype) ldev 1577 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->db_start = pci_resource_start(ldev->pcidev, 0) + I40IW_DB_ADDR_OFFSET; ldev 1579 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->db_start = pci_resource_start(ldev->pcidev, 0) + I40IW_VF_DB_ADDR_OFFSET; ldev 1581 drivers/infiniband/hw/i40iw/i40iw_main.c status = i40iw_save_msix_info(iwdev, ldev); ldev 1584 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->hw.dev_context = (void *)ldev->pcidev; ldev 1585 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->hw.hw_addr = ldev->hw_addr; ldev 1597 drivers/infiniband/hw/i40iw/i40iw_main.c status = i40iw_initialize_dev(iwdev, ldev); ldev 1631 drivers/infiniband/hw/i40iw/i40iw_main.c static int i40iw_open(struct i40e_info *ldev, struct i40e_client *client) ldev 1638 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = i40iw_find_netdev(ldev->netdev); ldev 1654 drivers/infiniband/hw/i40iw/i40iw_main.c iwdev->ldev = &hdl->ldev; ldev 1660 drivers/infiniband/hw/i40iw/i40iw_main.c status = i40iw_setup_init_state(hdl, ldev, client); ldev 1689 drivers/infiniband/hw/i40iw/i40iw_main.c status = i40iw_setup_ceqs(iwdev, ldev); ldev 1748 drivers/infiniband/hw/i40iw/i40iw_main.c static void i40iw_l2param_change(struct i40e_info *ldev, struct i40e_client *client, ldev 1757 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = i40iw_find_i40e_handler(ldev); ldev 1792 drivers/infiniband/hw/i40iw/i40iw_main.c static void i40iw_close(struct i40e_info *ldev, struct i40e_client *client, bool reset) ldev 1797 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = i40iw_find_i40e_handler(ldev); ldev 1821 drivers/infiniband/hw/i40iw/i40iw_main.c static void i40iw_vf_reset(struct i40e_info *ldev, struct i40e_client *client, u32 vf_id) ldev 1832 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = i40iw_find_i40e_handler(ldev); ldev 1871 drivers/infiniband/hw/i40iw/i40iw_main.c static void i40iw_vf_enable(struct i40e_info *ldev, ldev 1877 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = i40iw_find_i40e_handler(ldev); ldev 1896 drivers/infiniband/hw/i40iw/i40iw_main.c static int i40iw_vf_capable(struct i40e_info *ldev, ldev 1904 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = i40iw_find_i40e_handler(ldev); ldev 1929 drivers/infiniband/hw/i40iw/i40iw_main.c static int i40iw_virtchnl_receive(struct i40e_info *ldev, ldev 1943 drivers/infiniband/hw/i40iw/i40iw_main.c hdl = 
i40iw_find_i40e_handler(ldev); ldev 2009 drivers/infiniband/hw/i40iw/i40iw_main.c struct i40e_info *ldev; ldev 2015 drivers/infiniband/hw/i40iw/i40iw_main.c ldev = iwdev->ldev; ldev 2017 drivers/infiniband/hw/i40iw/i40iw_main.c if (ldev && ldev->ops && ldev->ops->virtchnl_send) ldev 2018 drivers/infiniband/hw/i40iw/i40iw_main.c return ldev->ops->virtchnl_send(ldev, &i40iw_client, vf_id, msg, len); ldev 166 drivers/infiniband/hw/i40iw/i40iw_utils.c netdev = iwdev->ldev->netdev; ldev 242 drivers/infiniband/hw/i40iw/i40iw_utils.c netdev = iwdev->ldev->netdev; ldev 338 drivers/infiniband/hw/i40iw/i40iw_utils.c netdev = iwdev->ldev->netdev; ldev 69 drivers/infiniband/hw/i40iw/i40iw_verbs.c props->vendor_id = iwdev->ldev->pcidev->vendor; ldev 70 drivers/infiniband/hw/i40iw/i40iw_verbs.c props->vendor_part_id = iwdev->ldev->pcidev->device; ldev 201 drivers/infiniband/hw/i40iw/i40iw_verbs.c vma->vm_pgoff + (pci_resource_start(ucontext->iwdev->ldev->pcidev, 0) >> PAGE_SHIFT), ldev 46 drivers/leds/leds-cr0014114.c struct led_classdev ldev; ldev 162 drivers/leds/leds-cr0014114.c static int cr0014114_set_sync(struct led_classdev *ldev, ldev 166 drivers/leds/leds-cr0014114.c struct cr0014114_led *led = container_of(ldev, ldev 168 drivers/leds/leds-cr0014114.c ldev); ldev 192 drivers/leds/leds-cr0014114.c &led->ldev.default_trigger); ldev 195 drivers/leds/leds-cr0014114.c led->ldev.max_brightness = CR_MAX_BRIGHTNESS; ldev 196 drivers/leds/leds-cr0014114.c led->ldev.brightness_set_blocking = cr0014114_set_sync; ldev 202 drivers/leds/leds-cr0014114.c ret = devm_led_classdev_register_ext(priv->dev, &led->ldev, ldev 16 drivers/leds/leds-dac124s085.c struct led_classdev ldev; ldev 33 drivers/leds/leds-dac124s085.c static int dac124s085_set_brightness(struct led_classdev *ldev, ldev 36 drivers/leds/leds-dac124s085.c struct dac124s085_led *led = container_of(ldev, struct dac124s085_led, ldev 37 drivers/leds/leds-dac124s085.c ldev); ldev 68 drivers/leds/leds-dac124s085.c led->ldev.name = led->name; ldev 69 drivers/leds/leds-dac124s085.c led->ldev.brightness = LED_OFF; ldev 70 drivers/leds/leds-dac124s085.c led->ldev.max_brightness = 0xfff; ldev 71 drivers/leds/leds-dac124s085.c led->ldev.brightness_set_blocking = dac124s085_set_brightness; ldev 72 drivers/leds/leds-dac124s085.c ret = led_classdev_register(&spi->dev, &led->ldev); ldev 83 drivers/leds/leds-dac124s085.c led_classdev_unregister(&dac->leds[i].ldev); ldev 94 drivers/leds/leds-dac124s085.c led_classdev_unregister(&dac->leds[i].ldev); ldev 55 drivers/leds/leds-locomo.c static int locomoled_probe(struct locomo_dev *ldev) ldev 59 drivers/leds/leds-locomo.c ret = devm_led_classdev_register(&ldev->dev, &locomo_led0); ldev 63 drivers/leds/leds-locomo.c return devm_led_classdev_register(&ldev->dev, &locomo_led1); ldev 60 drivers/leds/leds-lp3944.c #define ldev_to_led(c) container_of(c, struct lp3944_led_data, ldev) ldev 66 drivers/leds/leds-lp3944.c struct led_classdev ldev; ldev 175 drivers/leds/leds-lp3944.c __func__, led->ldev.name, status); ldev 214 drivers/leds/leds-lp3944.c __func__, led->ldev.name, reg, id, status, val); ldev 307 drivers/leds/leds-lp3944.c led->ldev.name = pled->name; ldev 308 drivers/leds/leds-lp3944.c led->ldev.max_brightness = 1; ldev 309 drivers/leds/leds-lp3944.c led->ldev.brightness_set_blocking = ldev 311 drivers/leds/leds-lp3944.c led->ldev.blink_set = lp3944_led_set_blink; ldev 312 drivers/leds/leds-lp3944.c led->ldev.flags = LED_CORE_SUSPENDRESUME; ldev 314 drivers/leds/leds-lp3944.c err = led_classdev_register(&client->dev, 
&led->ldev); ldev 318 drivers/leds/leds-lp3944.c led->ldev.name); ldev 323 drivers/leds/leds-lp3944.c led->ldev.brightness = ldev 331 drivers/leds/leds-lp3944.c led->ldev.name, pled->status); ldev 351 drivers/leds/leds-lp3944.c led_classdev_unregister(&data->leds[i].ldev); ldev 410 drivers/leds/leds-lp3944.c led_classdev_unregister(&data->leds[i].ldev); ldev 31 drivers/leds/leds-pca9532.c #define ldev_to_led(c) container_of(c, struct pca9532_led, ldev) ldev 123 drivers/leds/leds-pca9532.c b += data->leds[i].ldev.brightness; ldev 332 drivers/leds/leds-pca9532.c led_classdev_unregister(&data->leds[i].ldev); ldev 385 drivers/leds/leds-pca9532.c led->ldev.name = led->name; ldev 386 drivers/leds/leds-pca9532.c led->ldev.default_trigger = pled->default_trigger; ldev 387 drivers/leds/leds-pca9532.c led->ldev.brightness = LED_OFF; ldev 388 drivers/leds/leds-pca9532.c led->ldev.brightness_set_blocking = ldev 390 drivers/leds/leds-pca9532.c led->ldev.blink_set = pca9532_set_blink; ldev 391 drivers/leds/leds-pca9532.c err = led_classdev_register(&client->dev, &led->ldev); ldev 49 drivers/leds/leds-sc27xx-bltc.c struct led_classdev ldev; ldev 62 drivers/leds/leds-sc27xx-bltc.c #define to_sc27xx_led(ldev) \ ldev 63 drivers/leds/leds-sc27xx-bltc.c container_of(ldev, struct sc27xx_led, ldev) ldev 117 drivers/leds/leds-sc27xx-bltc.c static int sc27xx_led_set(struct led_classdev *ldev, enum led_brightness value) ldev 119 drivers/leds/leds-sc27xx-bltc.c struct sc27xx_led *leds = to_sc27xx_led(ldev); ldev 146 drivers/leds/leds-sc27xx-bltc.c static int sc27xx_led_pattern_clear(struct led_classdev *ldev) ldev 148 drivers/leds/leds-sc27xx-bltc.c struct sc27xx_led *leds = to_sc27xx_led(ldev); ldev 164 drivers/leds/leds-sc27xx-bltc.c ldev->brightness = LED_OFF; ldev 171 drivers/leds/leds-sc27xx-bltc.c static int sc27xx_led_pattern_set(struct led_classdev *ldev, ldev 175 drivers/leds/leds-sc27xx-bltc.c struct sc27xx_led *leds = to_sc27xx_led(ldev); ldev 233 drivers/leds/leds-sc27xx-bltc.c ldev->brightness = pattern[1].brightness; ldev 258 drivers/leds/leds-sc27xx-bltc.c led->ldev.brightness_set_blocking = sc27xx_led_set; ldev 259 drivers/leds/leds-sc27xx-bltc.c led->ldev.pattern_set = sc27xx_led_pattern_set; ldev 260 drivers/leds/leds-sc27xx-bltc.c led->ldev.pattern_clear = sc27xx_led_pattern_clear; ldev 261 drivers/leds/leds-sc27xx-bltc.c led->ldev.default_trigger = "pattern"; ldev 267 drivers/leds/leds-sc27xx-bltc.c err = devm_led_classdev_register_ext(dev, &led->ldev, ldev 46 drivers/leds/leds-spi-byte.c struct led_classdev ldev; ldev 68 drivers/leds/leds-spi-byte.c struct spi_byte_led *led = container_of(dev, struct spi_byte_led, ldev); ldev 110 drivers/leds/leds-spi-byte.c led->ldev.name = led->name; ldev 111 drivers/leds/leds-spi-byte.c led->ldev.brightness = LED_OFF; ldev 112 drivers/leds/leds-spi-byte.c led->ldev.max_brightness = led->cdef->max_value - led->cdef->off_value; ldev 113 drivers/leds/leds-spi-byte.c led->ldev.brightness_set_blocking = spi_byte_brightness_set_blocking; ldev 118 drivers/leds/leds-spi-byte.c led->ldev.brightness = led->ldev.max_brightness; ldev 125 drivers/leds/leds-spi-byte.c spi_byte_brightness_set_blocking(&led->ldev, ldev 126 drivers/leds/leds-spi-byte.c led->ldev.brightness); ldev 128 drivers/leds/leds-spi-byte.c ret = devm_led_classdev_register(&spi->dev, &led->ldev); ldev 41 drivers/leds/leds-tlc591xx.c #define ldev_to_led(c) container_of(c, struct tlc591xx_led, ldev) ldev 46 drivers/leds/leds-tlc591xx.c struct led_classdev ldev; ldev 139 drivers/leds/leds-tlc591xx.c 
led_classdev_unregister(&priv->leds[i].ldev); ldev 160 drivers/leds/leds-tlc591xx.c led->ldev.brightness_set_blocking = tlc591xx_brightness_set; ldev 161 drivers/leds/leds-tlc591xx.c led->ldev.max_brightness = TLC591XX_MAX_BRIGHTNESS; ldev 162 drivers/leds/leds-tlc591xx.c err = led_classdev_register(dev, &led->ldev); ldev 165 drivers/leds/leds-tlc591xx.c led->ldev.name); ldev 241 drivers/leds/leds-tlc591xx.c priv->leds[reg].ldev.name = ldev 243 drivers/leds/leds-tlc591xx.c priv->leds[reg].ldev.default_trigger = ldev 79 drivers/media/rc/fintek-cir.c static inline void fintek_select_logical_dev(struct fintek_dev *fintek, u8 ldev) ldev 81 drivers/media/rc/fintek-cir.c fintek_cr_write(fintek, ldev, GCR_LOGICAL_DEV_NO); ldev 110 drivers/media/rc/nuvoton-cir.c static inline void nvt_select_logical_dev(struct nvt_dev *nvt, u8 ldev) ldev 112 drivers/media/rc/nuvoton-cir.c nvt_cr_write(nvt, ldev, CR_LOGICAL_DEV_SEL); ldev 116 drivers/media/rc/nuvoton-cir.c static inline void nvt_enable_logical_dev(struct nvt_dev *nvt, u8 ldev) ldev 119 drivers/media/rc/nuvoton-cir.c nvt_select_logical_dev(nvt, ldev); ldev 125 drivers/media/rc/nuvoton-cir.c static inline void nvt_disable_logical_dev(struct nvt_dev *nvt, u8 ldev) ldev 128 drivers/media/rc/nuvoton-cir.c nvt_select_logical_dev(nvt, ldev); ldev 3442 drivers/net/bonding/bond_main.c struct net_device *ldev, *next, *now, *dev_stack[MAX_NEST_DEV + 1]; ldev 3452 drivers/net/bonding/bond_main.c ldev = netdev_next_lower_dev_rcu(now, &iter); ldev 3453 drivers/net/bonding/bond_main.c if (!ldev) ldev 3456 drivers/net/bonding/bond_main.c next = ldev; ldev 3457 drivers/net/bonding/bond_main.c niter = &ldev->adj_list.lower; ldev 16 drivers/net/ethernet/intel/i40e/i40e_client.c static int i40e_client_virtchnl_send(struct i40e_info *ldev, ldev 20 drivers/net/ethernet/intel/i40e/i40e_client.c static int i40e_client_setup_qvlist(struct i40e_info *ldev, ldev 24 drivers/net/ethernet/intel/i40e/i40e_client.c static void i40e_client_request_reset(struct i40e_info *ldev, ldev 28 drivers/net/ethernet/intel/i40e/i40e_client.c static int i40e_client_update_vsi_ctxt(struct i40e_info *ldev, ldev 139 drivers/net/ethernet/intel/i40e/i40e_client.c static void i40e_client_release_qvlist(struct i40e_info *ldev) ldev 141 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_qvlist_info *qvlist_info = ldev->qvlist_info; ldev 144 drivers/net/ethernet/intel/i40e/i40e_client.c if (!ldev->qvlist_info) ldev 148 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_pf *pf = ldev->pf; ldev 158 drivers/net/ethernet/intel/i40e/i40e_client.c kfree(ldev->qvlist_info); ldev 159 drivers/net/ethernet/intel/i40e/i40e_client.c ldev->qvlist_info = NULL; ldev 405 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_device *ldev; ldev 409 drivers/net/ethernet/intel/i40e/i40e_client.c list_for_each_entry(ldev, &i40e_devices, list) { ldev 410 drivers/net/ethernet/intel/i40e/i40e_client.c if (ldev->pf == pf) { ldev 415 drivers/net/ethernet/intel/i40e/i40e_client.c ldev = kzalloc(sizeof(*ldev), GFP_KERNEL); ldev 416 drivers/net/ethernet/intel/i40e/i40e_client.c if (!ldev) { ldev 420 drivers/net/ethernet/intel/i40e/i40e_client.c ldev->pf = pf; ldev 421 drivers/net/ethernet/intel/i40e/i40e_client.c INIT_LIST_HEAD(&ldev->list); ldev 422 drivers/net/ethernet/intel/i40e/i40e_client.c list_add(&ldev->list, &i40e_devices); ldev 453 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_device *ldev, *tmp; ldev 460 drivers/net/ethernet/intel/i40e/i40e_client.c list_for_each_entry_safe(ldev, tmp, 
&i40e_devices, list) { ldev 461 drivers/net/ethernet/intel/i40e/i40e_client.c if (ldev->pf == pf) { ldev 465 drivers/net/ethernet/intel/i40e/i40e_client.c list_del(&ldev->list); ldev 466 drivers/net/ethernet/intel/i40e/i40e_client.c kfree(ldev); ldev 483 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_device *ldev; ldev 487 drivers/net/ethernet/intel/i40e/i40e_client.c list_for_each_entry(ldev, &i40e_devices, list) { ldev 488 drivers/net/ethernet/intel/i40e/i40e_client.c pf = ldev->pf; ldev 524 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_device *ldev; ldev 528 drivers/net/ethernet/intel/i40e/i40e_client.c list_for_each_entry(ldev, &i40e_devices, list) { ldev 529 drivers/net/ethernet/intel/i40e/i40e_client.c pf = ldev->pf; ldev 548 drivers/net/ethernet/intel/i40e/i40e_client.c static int i40e_client_virtchnl_send(struct i40e_info *ldev, ldev 552 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_pf *pf = ldev->pf; ldev 573 drivers/net/ethernet/intel/i40e/i40e_client.c static int i40e_client_setup_qvlist(struct i40e_info *ldev, ldev 577 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_pf *pf = ldev->pf; ldev 582 drivers/net/ethernet/intel/i40e/i40e_client.c ldev->qvlist_info = kzalloc(struct_size(ldev->qvlist_info, qv_info, ldev 584 drivers/net/ethernet/intel/i40e/i40e_client.c if (!ldev->qvlist_info) ldev 586 drivers/net/ethernet/intel/i40e/i40e_client.c ldev->qvlist_info->num_vectors = qvlist_info->num_vectors; ldev 599 drivers/net/ethernet/intel/i40e/i40e_client.c ldev->qvlist_info->qv_info[i] = *qv_info; ldev 633 drivers/net/ethernet/intel/i40e/i40e_client.c kfree(ldev->qvlist_info); ldev 634 drivers/net/ethernet/intel/i40e/i40e_client.c ldev->qvlist_info = NULL; ldev 644 drivers/net/ethernet/intel/i40e/i40e_client.c static void i40e_client_request_reset(struct i40e_info *ldev, ldev 648 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_pf *pf = ldev->pf; ldev 678 drivers/net/ethernet/intel/i40e/i40e_client.c static int i40e_client_update_vsi_ctxt(struct i40e_info *ldev, ldev 683 drivers/net/ethernet/intel/i40e/i40e_client.c struct i40e_pf *pf = ldev->pf; ldev 116 drivers/net/ethernet/intel/i40e/i40e_client.h int (*setup_qvlist)(struct i40e_info *ldev, struct i40e_client *client, ldev 119 drivers/net/ethernet/intel/i40e/i40e_client.h int (*virtchnl_send)(struct i40e_info *ldev, struct i40e_client *client, ldev 125 drivers/net/ethernet/intel/i40e/i40e_client.h void (*request_reset)(struct i40e_info *ldev, ldev 131 drivers/net/ethernet/intel/i40e/i40e_client.h int (*update_vsi_ctxt)(struct i40e_info *ldev, ldev 141 drivers/net/ethernet/intel/i40e/i40e_client.h int (*open)(struct i40e_info *ldev, struct i40e_client *client); ldev 147 drivers/net/ethernet/intel/i40e/i40e_client.h void (*close)(struct i40e_info *ldev, struct i40e_client *client, ldev 151 drivers/net/ethernet/intel/i40e/i40e_client.h void (*l2_param_change)(struct i40e_info *ldev, ldev 155 drivers/net/ethernet/intel/i40e/i40e_client.h int (*virtchnl_receive)(struct i40e_info *ldev, ldev 160 drivers/net/ethernet/intel/i40e/i40e_client.h void (*vf_reset)(struct i40e_info *ldev, ldev 164 drivers/net/ethernet/intel/i40e/i40e_client.h void (*vf_enable)(struct i40e_info *ldev, ldev 168 drivers/net/ethernet/intel/i40e/i40e_client.h int (*vf_capable)(struct i40e_info *ldev, ldev 17 drivers/net/ethernet/intel/iavf/iavf_client.c static u32 iavf_client_virtchnl_send(struct iavf_info *ldev, ldev 21 drivers/net/ethernet/intel/iavf/iavf_client.c static int iavf_client_setup_qvlist(struct 
iavf_info *ldev, ldev 135 drivers/net/ethernet/intel/iavf/iavf_client.c static int iavf_client_release_qvlist(struct iavf_info *ldev) ldev 137 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_adapter *adapter = ldev->vf; ldev 294 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_device *ldev; ldev 298 drivers/net/ethernet/intel/iavf/iavf_client.c list_for_each_entry(ldev, &iavf_devices, list) { ldev 299 drivers/net/ethernet/intel/iavf/iavf_client.c if (ldev->vf == adapter) { ldev 304 drivers/net/ethernet/intel/iavf/iavf_client.c ldev = kzalloc(sizeof(*ldev), GFP_KERNEL); ldev 305 drivers/net/ethernet/intel/iavf/iavf_client.c if (!ldev) { ldev 309 drivers/net/ethernet/intel/iavf/iavf_client.c ldev->vf = adapter; ldev 310 drivers/net/ethernet/intel/iavf/iavf_client.c INIT_LIST_HEAD(&ldev->list); ldev 311 drivers/net/ethernet/intel/iavf/iavf_client.c list_add(&ldev->list, &iavf_devices); ldev 334 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_device *ldev, *tmp; ldev 338 drivers/net/ethernet/intel/iavf/iavf_client.c list_for_each_entry_safe(ldev, tmp, &iavf_devices, list) { ldev 339 drivers/net/ethernet/intel/iavf/iavf_client.c if (ldev->vf == adapter) { ldev 344 drivers/net/ethernet/intel/iavf/iavf_client.c list_del(&ldev->list); ldev 345 drivers/net/ethernet/intel/iavf/iavf_client.c kfree(ldev); ldev 363 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_device *ldev; ldev 367 drivers/net/ethernet/intel/iavf/iavf_client.c list_for_each_entry(ldev, &iavf_devices, list) { ldev 368 drivers/net/ethernet/intel/iavf/iavf_client.c adapter = ldev->vf; ldev 397 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_device *ldev; ldev 401 drivers/net/ethernet/intel/iavf/iavf_client.c list_for_each_entry(ldev, &iavf_devices, list) { ldev 402 drivers/net/ethernet/intel/iavf/iavf_client.c adapter = ldev->vf; ldev 418 drivers/net/ethernet/intel/iavf/iavf_client.c static u32 iavf_client_virtchnl_send(struct iavf_info *ldev, ldev 422 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_adapter *adapter = ldev->vf; ldev 445 drivers/net/ethernet/intel/iavf/iavf_client.c static int iavf_client_setup_qvlist(struct iavf_info *ldev, ldev 450 drivers/net/ethernet/intel/iavf/iavf_client.c struct iavf_adapter *adapter = ldev->vf; ldev 109 drivers/net/ethernet/intel/iavf/iavf_client.h int (*setup_qvlist)(struct iavf_info *ldev, struct iavf_client *client, ldev 112 drivers/net/ethernet/intel/iavf/iavf_client.h u32 (*virtchnl_send)(struct iavf_info *ldev, struct iavf_client *client, ldev 116 drivers/net/ethernet/intel/iavf/iavf_client.h void (*request_reset)(struct iavf_info *ldev, ldev 124 drivers/net/ethernet/intel/iavf/iavf_client.h int (*open)(struct iavf_info *ldev, struct iavf_client *client); ldev 130 drivers/net/ethernet/intel/iavf/iavf_client.h void (*close)(struct iavf_info *ldev, struct iavf_client *client, ldev 134 drivers/net/ethernet/intel/iavf/iavf_client.h void (*l2_param_change)(struct iavf_info *ldev, ldev 139 drivers/net/ethernet/intel/iavf/iavf_client.h int (*virtchnl_receive)(struct iavf_info *ldev, ldev 121 drivers/net/ethernet/mellanox/mlx5/core/lag.c int mlx5_lag_dev_get_netdev_idx(struct mlx5_lag *ldev, ldev 127 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->pf[i].netdev == ndev) ldev 133 drivers/net/ethernet/mellanox/mlx5/core/lag.c static bool __mlx5_lag_is_roce(struct mlx5_lag *ldev) ldev 135 drivers/net/ethernet/mellanox/mlx5/core/lag.c return !!(ldev->flags & MLX5_LAG_FLAG_ROCE); ldev 138 drivers/net/ethernet/mellanox/mlx5/core/lag.c 
static bool __mlx5_lag_is_sriov(struct mlx5_lag *ldev) ldev 140 drivers/net/ethernet/mellanox/mlx5/core/lag.c return !!(ldev->flags & MLX5_LAG_FLAG_SRIOV); ldev 159 drivers/net/ethernet/mellanox/mlx5/core/lag.c void mlx5_modify_lag(struct mlx5_lag *ldev, ldev 162 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_core_dev *dev0 = ldev->pf[0].dev; ldev 169 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (v2p_port1 != ldev->v2p_map[0] || ldev 170 drivers/net/ethernet/mellanox/mlx5/core/lag.c v2p_port2 != ldev->v2p_map[1]) { ldev 171 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->v2p_map[0] = v2p_port1; ldev 172 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->v2p_map[1] = v2p_port2; ldev 175 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->v2p_map[0], ldev->v2p_map[1]); ldev 185 drivers/net/ethernet/mellanox/mlx5/core/lag.c static int mlx5_create_lag(struct mlx5_lag *ldev, ldev 188 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_core_dev *dev0 = ldev->pf[0].dev; ldev 191 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_infer_tx_affinity_mapping(tracker, &ldev->v2p_map[0], ldev 192 drivers/net/ethernet/mellanox/mlx5/core/lag.c &ldev->v2p_map[1]); ldev 195 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->v2p_map[0], ldev->v2p_map[1]); ldev 197 drivers/net/ethernet/mellanox/mlx5/core/lag.c err = mlx5_cmd_create_lag(dev0, ldev->v2p_map[0], ldev->v2p_map[1]); ldev 205 drivers/net/ethernet/mellanox/mlx5/core/lag.c int mlx5_activate_lag(struct mlx5_lag *ldev, ldev 210 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_core_dev *dev0 = ldev->pf[0].dev; ldev 213 drivers/net/ethernet/mellanox/mlx5/core/lag.c err = mlx5_create_lag(ldev, tracker); ldev 226 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->flags |= flags; ldev 230 drivers/net/ethernet/mellanox/mlx5/core/lag.c static int mlx5_deactivate_lag(struct mlx5_lag *ldev) ldev 232 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_core_dev *dev0 = ldev->pf[0].dev; ldev 233 drivers/net/ethernet/mellanox/mlx5/core/lag.c bool roce_lag = __mlx5_lag_is_roce(ldev); ldev 236 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->flags &= ~MLX5_LAG_MODE_FLAGS; ldev 253 drivers/net/ethernet/mellanox/mlx5/core/lag.c static bool mlx5_lag_check_prereq(struct mlx5_lag *ldev) ldev 255 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev->pf[0].dev || !ldev->pf[1].dev) ldev 259 drivers/net/ethernet/mellanox/mlx5/core/lag.c return mlx5_esw_lag_prereq(ldev->pf[0].dev, ldev->pf[1].dev); ldev 261 drivers/net/ethernet/mellanox/mlx5/core/lag.c return (!mlx5_sriov_is_enabled(ldev->pf[0].dev) && ldev 262 drivers/net/ethernet/mellanox/mlx5/core/lag.c !mlx5_sriov_is_enabled(ldev->pf[1].dev)); ldev 266 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void mlx5_lag_add_ib_devices(struct mlx5_lag *ldev) ldev 271 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->pf[i].dev) ldev 272 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_add_dev_by_protocol(ldev->pf[i].dev, ldev 276 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void mlx5_lag_remove_ib_devices(struct mlx5_lag *ldev) ldev 281 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->pf[i].dev) ldev 282 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_remove_dev_by_protocol(ldev->pf[i].dev, ldev 286 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void mlx5_do_bond(struct mlx5_lag *ldev) ldev 288 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_core_dev *dev0 = ldev->pf[0].dev; ldev 289 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct 
mlx5_core_dev *dev1 = ldev->pf[1].dev; ldev 298 drivers/net/ethernet/mellanox/mlx5/core/lag.c tracker = ldev->tracker; ldev 301 drivers/net/ethernet/mellanox/mlx5/core/lag.c do_bond = tracker.is_bonded && mlx5_lag_check_prereq(ldev); ldev 303 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (do_bond && !__mlx5_lag_is_active(ldev)) { ldev 313 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_remove_ib_devices(ldev); ldev 315 drivers/net/ethernet/mellanox/mlx5/core/lag.c err = mlx5_activate_lag(ldev, &tracker, ldev 320 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_add_ib_devices(ldev); ldev 329 drivers/net/ethernet/mellanox/mlx5/core/lag.c } else if (do_bond && __mlx5_lag_is_active(ldev)) { ldev 330 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_modify_lag(ldev, &tracker); ldev 331 drivers/net/ethernet/mellanox/mlx5/core/lag.c } else if (!do_bond && __mlx5_lag_is_active(ldev)) { ldev 332 drivers/net/ethernet/mellanox/mlx5/core/lag.c roce_lag = __mlx5_lag_is_roce(ldev); ldev 339 drivers/net/ethernet/mellanox/mlx5/core/lag.c err = mlx5_deactivate_lag(ldev); ldev 344 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_add_ib_devices(ldev); ldev 348 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void mlx5_queue_bond_work(struct mlx5_lag *ldev, unsigned long delay) ldev 350 drivers/net/ethernet/mellanox/mlx5/core/lag.c queue_delayed_work(ldev->wq, &ldev->bond_work, delay); ldev 356 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev = container_of(delayed_work, struct mlx5_lag, ldev 363 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_queue_bond_work(ldev, HZ); ldev 367 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_do_bond(ldev); ldev 371 drivers/net/ethernet/mellanox/mlx5/core/lag.c static int mlx5_handle_changeupper_event(struct mlx5_lag *ldev, ldev 396 drivers/net/ethernet/mellanox/mlx5/core/lag.c idx = mlx5_lag_dev_get_netdev_idx(ldev, ndev_tmp); ldev 429 drivers/net/ethernet/mellanox/mlx5/core/lag.c static int mlx5_handle_changelowerstate_event(struct mlx5_lag *ldev, ldev 440 drivers/net/ethernet/mellanox/mlx5/core/lag.c idx = mlx5_lag_dev_get_netdev_idx(ldev, ndev); ldev 461 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 470 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = container_of(this, struct mlx5_lag, nb); ldev 471 drivers/net/ethernet/mellanox/mlx5/core/lag.c tracker = ldev->tracker; ldev 475 drivers/net/ethernet/mellanox/mlx5/core/lag.c changed = mlx5_handle_changeupper_event(ldev, &tracker, ndev, ldev 479 drivers/net/ethernet/mellanox/mlx5/core/lag.c changed = mlx5_handle_changelowerstate_event(ldev, &tracker, ldev 485 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->tracker = tracker; ldev 489 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_queue_bond_work(ldev, 0); ldev 496 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 498 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = kzalloc(sizeof(*ldev), GFP_KERNEL); ldev 499 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev) ldev 502 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->wq = create_singlethread_workqueue("mlx5_lag"); ldev 503 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev->wq) { ldev 504 drivers/net/ethernet/mellanox/mlx5/core/lag.c kfree(ldev); ldev 508 drivers/net/ethernet/mellanox/mlx5/core/lag.c INIT_DELAYED_WORK(&ldev->bond_work, mlx5_do_bond_work); ldev 510 drivers/net/ethernet/mellanox/mlx5/core/lag.c return ldev; ldev 513 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void 
mlx5_lag_dev_free(struct mlx5_lag *ldev) ldev 515 drivers/net/ethernet/mellanox/mlx5/core/lag.c destroy_workqueue(ldev->wq); ldev 516 drivers/net/ethernet/mellanox/mlx5/core/lag.c kfree(ldev); ldev 519 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void mlx5_lag_dev_add_pf(struct mlx5_lag *ldev, ldev 529 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->pf[fn].dev = dev; ldev 530 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->pf[fn].netdev = netdev; ldev 531 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->tracker.netdev_state[fn].link_up = 0; ldev 532 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->tracker.netdev_state[fn].tx_enabled = 0; ldev 534 drivers/net/ethernet/mellanox/mlx5/core/lag.c dev->priv.lag = ldev; ldev 539 drivers/net/ethernet/mellanox/mlx5/core/lag.c static void mlx5_lag_dev_remove_pf(struct mlx5_lag *ldev, ldev 545 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->pf[i].dev == dev) ldev 552 drivers/net/ethernet/mellanox/mlx5/core/lag.c memset(&ldev->pf[i], 0, sizeof(*ldev->pf)); ldev 561 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev = NULL; ldev 572 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = tmp_dev->priv.lag; ldev 574 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev) { ldev 575 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_alloc(); ldev 576 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev) { ldev 582 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_dev_add_pf(ldev, dev, netdev); ldev 584 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev->nb.notifier_call) { ldev 585 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->nb.notifier_call = mlx5_lag_netdev_event; ldev 586 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (register_netdevice_notifier(&ldev->nb)) { ldev 587 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->nb.notifier_call = NULL; ldev 592 drivers/net/ethernet/mellanox/mlx5/core/lag.c err = mlx5_lag_mp_init(ldev); ldev 601 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 604 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 605 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev) ldev 608 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (__mlx5_lag_is_active(ldev)) ldev 609 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_deactivate_lag(ldev); ldev 611 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_dev_remove_pf(ldev, dev); ldev 614 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->pf[i].dev) ldev 618 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->nb.notifier_call) ldev 619 drivers/net/ethernet/mellanox/mlx5/core/lag.c unregister_netdevice_notifier(&ldev->nb); ldev 620 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_mp_cleanup(ldev); ldev 621 drivers/net/ethernet/mellanox/mlx5/core/lag.c cancel_delayed_work_sync(&ldev->bond_work); ldev 622 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_lag_dev_free(ldev); ldev 628 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 632 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 633 drivers/net/ethernet/mellanox/mlx5/core/lag.c res = ldev && __mlx5_lag_is_roce(ldev); ldev 642 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 646 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 647 drivers/net/ethernet/mellanox/mlx5/core/lag.c res = ldev && __mlx5_lag_is_active(ldev); ldev 656 
drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 660 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 661 drivers/net/ethernet/mellanox/mlx5/core/lag.c res = ldev && __mlx5_lag_is_sriov(ldev); ldev 670 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 673 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 674 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev) ldev 677 drivers/net/ethernet/mellanox/mlx5/core/lag.c mlx5_do_bond(ldev); ldev 686 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 689 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 691 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!(ldev && __mlx5_lag_is_roce(ldev))) ldev 694 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev->tracker.tx_type == NETDEV_LAG_TX_TYPE_ACTIVEBACKUP) { ldev 695 drivers/net/ethernet/mellanox/mlx5/core/lag.c ndev = ldev->tracker.netdev_state[0].tx_enabled ? ldev 696 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev->pf[0].netdev : ldev->pf[1].netdev; ldev 698 drivers/net/ethernet/mellanox/mlx5/core/lag.c ndev = ldev->pf[0].netdev; ldev 714 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 719 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 720 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (!ldev || !__mlx5_lag_is_roce(ldev) || ldev->pf[0].dev == dev) ldev 734 drivers/net/ethernet/mellanox/mlx5/core/lag.c struct mlx5_lag *ldev; ldev 746 drivers/net/ethernet/mellanox/mlx5/core/lag.c ldev = mlx5_lag_dev_get(dev); ldev 747 drivers/net/ethernet/mellanox/mlx5/core/lag.c if (ldev && __mlx5_lag_is_roce(ldev)) { ldev 749 drivers/net/ethernet/mellanox/mlx5/core/lag.c mdev[0] = ldev->pf[0].dev; ldev 750 drivers/net/ethernet/mellanox/mlx5/core/lag.c mdev[1] = ldev->pf[1].dev; ldev 52 drivers/net/ethernet/mellanox/mlx5/core/lag.h __mlx5_lag_is_active(struct mlx5_lag *ldev) ldev 54 drivers/net/ethernet/mellanox/mlx5/core/lag.h return !!(ldev->flags & MLX5_LAG_MODE_FLAGS); ldev 57 drivers/net/ethernet/mellanox/mlx5/core/lag.h void mlx5_modify_lag(struct mlx5_lag *ldev, ldev 59 drivers/net/ethernet/mellanox/mlx5/core/lag.h int mlx5_activate_lag(struct mlx5_lag *ldev, ldev 62 drivers/net/ethernet/mellanox/mlx5/core/lag.h int mlx5_lag_dev_get_netdev_idx(struct mlx5_lag *ldev, ldev 12 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c static bool mlx5_lag_multipath_check_prereq(struct mlx5_lag *ldev) ldev 14 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (!ldev->pf[0].dev || !ldev->pf[1].dev) ldev 17 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c return mlx5_esw_multipath_prereq(ldev->pf[0].dev, ldev->pf[1].dev); ldev 20 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c static bool __mlx5_lag_is_multipath(struct mlx5_lag *ldev) ldev 22 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c return !!(ldev->flags & MLX5_LAG_FLAG_MULTIPATH); ldev 27 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct mlx5_lag *ldev; ldev 30 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c ldev = mlx5_lag_dev_get(dev); ldev 31 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c res = ldev && __mlx5_lag_is_multipath(ldev); ldev 46 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c static void mlx5_lag_set_port_affinity(struct mlx5_lag *ldev, int port) ldev 50 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (!__mlx5_lag_is_multipath(ldev)) ldev 73 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c 
mlx5_core_warn(ldev->pf[0].dev, "Invalid affinity port %d", ldev 79 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_notifier_call_chain(ldev->pf[0].dev->priv.events, ldev 84 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_notifier_call_chain(ldev->pf[1].dev->priv.events, ldev 88 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_modify_lag(ldev, &tracker); ldev 94 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct mlx5_lag *ldev = container_of(mp, struct mlx5_lag, lag_mp); ldev 96 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c flush_workqueue(ldev->wq); ldev 101 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct mlx5_lag *ldev; ldev 109 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c static void mlx5_lag_fib_route_event(struct mlx5_lag *ldev, ldev 113 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct lag_mp *mp = &ldev->lag_mp; ldev 128 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (__mlx5_lag_is_active(ldev)) { ldev 131 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c int i = mlx5_lag_dev_get_netdev_idx(ldev, nh_dev); ldev 133 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_set_port_affinity(ldev, ++i); ldev 144 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (!(fib_nh0->fib_nh_dev == ldev->pf[0].netdev && ldev 145 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c fib_nh1->fib_nh_dev == ldev->pf[1].netdev) && ldev 146 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c !(fib_nh0->fib_nh_dev == ldev->pf[1].netdev && ldev 147 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c fib_nh1->fib_nh_dev == ldev->pf[0].netdev)) { ldev 148 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_core_warn(ldev->pf[0].dev, "Multipath offload require two ports of the same HCA\n"); ldev 153 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (!mp->mfi && !__mlx5_lag_is_active(ldev)) { ldev 156 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c tracker = ldev->tracker; ldev 157 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_activate_lag(ldev, &tracker, MLX5_LAG_FLAG_MULTIPATH); ldev 160 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_set_port_affinity(ldev, 0); ldev 164 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c static void mlx5_lag_fib_nexthop_event(struct mlx5_lag *ldev, ldev 169 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct lag_mp *mp = &ldev->lag_mp; ldev 177 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c int i = mlx5_lag_dev_get_netdev_idx(ldev, fib_nh->fib_nh_dev); ldev 181 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_set_port_affinity(ldev, i); ldev 185 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_set_port_affinity(ldev, 0); ldev 193 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct mlx5_lag *ldev = fib_work->ldev; ldev 203 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_fib_route_event(ldev, fib_work->event, ldev 210 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_fib_nexthop_event(ldev, ldev 223 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c mlx5_lag_init_fib_work(struct mlx5_lag *ldev, unsigned long event) ldev 232 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c fib_work->ldev = ldev; ldev 243 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct mlx5_lag *ldev = container_of(mp, struct mlx5_lag, lag_mp); ldev 257 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (!mlx5_lag_multipath_check_prereq(ldev)) ldev 273 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c if (fib_dev != ldev->pf[0].netdev && ldev 274 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c 
fib_dev != ldev->pf[1].netdev) { ldev 277 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c fib_work = mlx5_lag_init_fib_work(ldev, event); ldev 290 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c fib_work = mlx5_lag_init_fib_work(ldev, event); ldev 300 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c queue_work(ldev->wq, &fib_work->work); ldev 305 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c int mlx5_lag_mp_init(struct mlx5_lag *ldev) ldev 307 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct lag_mp *mp = &ldev->lag_mp; ldev 322 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c void mlx5_lag_mp_cleanup(struct mlx5_lag *ldev) ldev 324 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.c struct lag_mp *mp = &ldev->lag_mp; ldev 17 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.h int mlx5_lag_mp_init(struct mlx5_lag *ldev); ldev 18 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.h void mlx5_lag_mp_cleanup(struct mlx5_lag *ldev); ldev 22 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.h static inline int mlx5_lag_mp_init(struct mlx5_lag *ldev) { return 0; } ldev 23 drivers/net/ethernet/mellanox/mlx5/core/lag_mp.h static inline void mlx5_lag_mp_cleanup(struct mlx5_lag *ldev) {} ldev 179 drivers/net/usb/qmi_wwan.c struct net_device *ldev; ldev 182 drivers/net/usb/qmi_wwan.c netdev_for_each_upper_dev_rcu(dev->net, ldev, iter) { ldev 183 drivers/net/usb/qmi_wwan.c priv = netdev_priv(ldev); ldev 186 drivers/net/usb/qmi_wwan.c return ldev; ldev 408 drivers/net/usb/qmi_wwan.c struct net_device *ldev; ldev 412 drivers/net/usb/qmi_wwan.c netdev_for_each_upper_dev_rcu(dev, ldev, iter) { ldev 413 drivers/net/usb/qmi_wwan.c priv = netdev_priv(ldev); ldev 1503 drivers/net/usb/qmi_wwan.c struct net_device *ldev; ldev 1516 drivers/net/usb/qmi_wwan.c netdev_for_each_upper_dev_rcu(dev->net, ldev, iter) ldev 1517 drivers/net/usb/qmi_wwan.c qmimux_unregister_device(ldev, &list); ldev 719 drivers/parisc/lba_pci.c struct lba_device *ldev = LBA_DEV(parisc_walk_tree(bus->bridge)); ldev 739 drivers/parisc/lba_pci.c ldev->hba.io_space.name, ldev 740 drivers/parisc/lba_pci.c ldev->hba.io_space.start, ldev->hba.io_space.end, ldev 741 drivers/parisc/lba_pci.c ldev->hba.io_space.flags); ldev 743 drivers/parisc/lba_pci.c ldev->hba.lmmio_space.name, ldev 744 drivers/parisc/lba_pci.c ldev->hba.lmmio_space.start, ldev->hba.lmmio_space.end, ldev 745 drivers/parisc/lba_pci.c ldev->hba.lmmio_space.flags); ldev 747 drivers/parisc/lba_pci.c err = request_resource(&ioport_resource, &(ldev->hba.io_space)); ldev 753 drivers/parisc/lba_pci.c if (ldev->hba.elmmio_space.flags) { ldev 755 drivers/parisc/lba_pci.c &(ldev->hba.elmmio_space)); ldev 760 drivers/parisc/lba_pci.c (long)ldev->hba.elmmio_space.start, ldev 761 drivers/parisc/lba_pci.c (long)ldev->hba.elmmio_space.end); ldev 768 drivers/parisc/lba_pci.c if (ldev->hba.lmmio_space.flags) { ldev 769 drivers/parisc/lba_pci.c err = request_resource(&iomem_resource, &(ldev->hba.lmmio_space)); ldev 773 drivers/parisc/lba_pci.c (long)ldev->hba.lmmio_space.start, ldev 774 drivers/parisc/lba_pci.c (long)ldev->hba.lmmio_space.end); ldev 780 drivers/parisc/lba_pci.c if (ldev->hba.gmmio_space.flags) { ldev 781 drivers/parisc/lba_pci.c err = request_resource(&iomem_resource, &(ldev->hba.gmmio_space)); ldev 785 drivers/parisc/lba_pci.c (long)ldev->hba.gmmio_space.start, ldev 786 drivers/parisc/lba_pci.c (long)ldev->hba.gmmio_space.end); ldev 834 drivers/parisc/lba_pci.c iosapic_fixup_irq(ldev->iosapic_obj, dev); ldev 244 drivers/scsi/myrs.c mbox->ldev_info.ldev.ldev_num = ldev_num; ldev 1030 
drivers/scsi/myrs.c mbox->set_devstate.ldev.ldev_num = ldev_num; ldev 1163 drivers/scsi/myrs.c mbox->ldev_info.ldev.ldev_num = ldev_num; ldev 1166 drivers/scsi/myrs.c mbox->ldev_info.ldev.ldev_num = ldev_num; ldev 1265 drivers/scsi/myrs.c mbox->cc.ldev.ldev_num = ldev_num; ldev 1270 drivers/scsi/myrs.c mbox->cc.ldev.ldev_num = ldev_num; ldev 750 drivers/scsi/myrs.h struct myrs_ldev ldev; /* Bytes 16-18 */ ldev 795 drivers/scsi/myrs.h struct myrs_ldev ldev; /* Bytes 16-18 */ ldev 812 drivers/scsi/myrs.h struct myrs_ldev ldev; /* Bytes 16-18 */ ldev 17 drivers/staging/kpc2000/kpc_dma/dma.c struct kpc_dma_device *ldev = (struct kpc_dma_device *)dev_id; ldev 19 drivers/staging/kpc2000/kpc_dma/dma.c if ((GetEngineControl(ldev) & ENG_CTL_IRQ_ACTIVE) || (ldev->desc_completed->MyDMAAddr != GetEngineCompletePtr(ldev))) ldev 20 drivers/staging/kpc2000/kpc_dma/dma.c schedule_work(&ldev->irq_work); ldev 39 drivers/staging/kpc2000/kpc_dma/fileops.c struct kpc_dma_device *ldev; ldev 53 drivers/staging/kpc2000/kpc_dma/fileops.c ldev = priv->ldev; ldev 54 drivers/staging/kpc2000/kpc_dma/fileops.c BUG_ON(ldev == NULL); ldev 58 drivers/staging/kpc2000/kpc_dma/fileops.c dev_err(&priv->ldev->pldev->dev, "Couldn't kmalloc space for for the aio data\n"); ldev 64 drivers/staging/kpc2000/kpc_dma/fileops.c acd->ldev = priv->ldev; ldev 73 drivers/staging/kpc2000/kpc_dma/fileops.c dev_err(&priv->ldev->pldev->dev, "Couldn't kmalloc space for for the page pointers\n"); ldev 83 drivers/staging/kpc2000/kpc_dma/fileops.c dev_err(&priv->ldev->pldev->dev, "Couldn't get_user_pages (%ld)\n", rv); ldev 90 drivers/staging/kpc2000/kpc_dma/fileops.c dev_err(&priv->ldev->pldev->dev, "Couldn't alloc sg_table (%ld)\n", rv); ldev 95 drivers/staging/kpc2000/kpc_dma/fileops.c acd->mapped_entry_count = dma_map_sg(&ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, ldev->dir); ldev 97 drivers/staging/kpc2000/kpc_dma/fileops.c dev_err(&priv->ldev->pldev->dev, "Couldn't dma_map_sg (%d)\n", acd->mapped_entry_count); ldev 106 drivers/staging/kpc2000/kpc_dma/fileops.c lock_engine(ldev); ldev 109 drivers/staging/kpc2000/kpc_dma/fileops.c num_descrs_avail = count_descriptors_available(ldev); ldev 110 drivers/staging/kpc2000/kpc_dma/fileops.c dev_dbg(&priv->ldev->pldev->dev, " mapped_entry_count = %d num_descrs_needed = %d num_descrs_avail = %d\n", acd->mapped_entry_count, desc_needed, num_descrs_avail); ldev 111 drivers/staging/kpc2000/kpc_dma/fileops.c if (desc_needed >= ldev->desc_pool_cnt) { ldev 112 drivers/staging/kpc2000/kpc_dma/fileops.c dev_warn(&priv->ldev->pldev->dev, " mapped_entry_count = %d num_descrs_needed = %d num_descrs_avail = %d TOO MANY to ever complete!\n", acd->mapped_entry_count, desc_needed, num_descrs_avail); ldev 117 drivers/staging/kpc2000/kpc_dma/fileops.c dev_warn(&priv->ldev->pldev->dev, " mapped_entry_count = %d num_descrs_needed = %d num_descrs_avail = %d Too many to complete right now.\n", acd->mapped_entry_count, desc_needed, num_descrs_avail); ldev 123 drivers/staging/kpc2000/kpc_dma/fileops.c desc = ldev->desc_next; ldev 162 drivers/staging/kpc2000/kpc_dma/fileops.c dev_dbg(&priv->ldev->pldev->dev, " Filled descriptor %p (acd = %p)\n", desc, desc->acd); ldev 164 drivers/staging/kpc2000/kpc_dma/fileops.c ldev->desc_next = desc->Next; ldev 170 drivers/staging/kpc2000/kpc_dma/fileops.c SetEngineSWPtr(ldev, ldev->desc_next); ldev 172 drivers/staging/kpc2000/kpc_dma/fileops.c unlock_engine(ldev); ldev 188 drivers/staging/kpc2000/kpc_dma/fileops.c unlock_engine(ldev); ldev 189 
drivers/staging/kpc2000/kpc_dma/fileops.c dma_unmap_sg(&ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, ldev->dir); ldev 200 drivers/staging/kpc2000/kpc_dma/fileops.c dev_dbg(&priv->ldev->pldev->dev, "%s returning with error %ld\n", __func__, rv); ldev 211 drivers/staging/kpc2000/kpc_dma/fileops.c BUG_ON(acd->ldev == NULL); ldev 212 drivers/staging/kpc2000/kpc_dma/fileops.c BUG_ON(acd->ldev->pldev == NULL); ldev 220 drivers/staging/kpc2000/kpc_dma/fileops.c dma_unmap_sg(&acd->ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, acd->ldev->dir); ldev 248 drivers/staging/kpc2000/kpc_dma/fileops.c struct kpc_dma_device *ldev = kpc_dma_lookup_device(iminor(inode)); ldev 250 drivers/staging/kpc2000/kpc_dma/fileops.c if (!ldev) ldev 253 drivers/staging/kpc2000/kpc_dma/fileops.c if (!atomic_dec_and_test(&ldev->open_count)) { ldev 254 drivers/staging/kpc2000/kpc_dma/fileops.c atomic_inc(&ldev->open_count); ldev 262 drivers/staging/kpc2000/kpc_dma/fileops.c priv->ldev = ldev; ldev 273 drivers/staging/kpc2000/kpc_dma/fileops.c struct kpc_dma_device *eng = priv->ldev; ldev 297 drivers/staging/kpc2000/kpc_dma/fileops.c atomic_inc(&priv->ldev->open_count); /* release the device */ ldev 307 drivers/staging/kpc2000/kpc_dma/fileops.c if (priv->ldev->dir != DMA_FROM_DEVICE) ldev 318 drivers/staging/kpc2000/kpc_dma/fileops.c if (priv->ldev->dir != DMA_TO_DEVICE) ldev 39 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c static void kpc_dma_add_device(struct kpc_dma_device *ldev) ldev 42 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c list_add(&ldev->list, &kpc_dma_list); ldev 46 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c static void kpc_dma_del_device(struct kpc_dma_device *ldev) ldev 49 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c list_del(&ldev->list); ldev 56 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c struct kpc_dma_device *ldev; ldev 61 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev = platform_get_drvdata(pldev); ldev 62 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c if (!ldev) ldev 74 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c readl(ldev->eng_regs + 1), ldev 75 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c readl(ldev->eng_regs + 2), ldev 76 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c readl(ldev->eng_regs + 3), ldev 77 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c readl(ldev->eng_regs + 4), ldev 78 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->desc_pool_first, ldev 79 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->desc_pool_last, ldev 80 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->desc_next, ldev 81 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->desc_completed ldev 101 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c struct kpc_dma_device *ldev = kzalloc(sizeof(struct kpc_dma_device), GFP_KERNEL); ldev 103 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c if (!ldev) { ldev 109 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c INIT_LIST_HEAD(&ldev->list); ldev 111 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->pldev = pldev; ldev 112 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c platform_set_drvdata(pldev, ldev); ldev 113 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c atomic_set(&ldev->open_count, 1); ldev 115 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c mutex_init(&ldev->sem); ldev 116 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c lock_engine(ldev); ldev 121 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c dev_err(&ldev->pldev->dev, "%s: didn't get the engine regs resource!\n", __func__); ldev 125 
drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->eng_regs = ioremap_nocache(r->start, resource_size(r)); ldev 126 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c if (!ldev->eng_regs) { ldev 127 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c dev_err(&ldev->pldev->dev, "%s: failed to ioremap engine regs!\n", __func__); ldev 134 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c dev_err(&ldev->pldev->dev, "%s: didn't get the IRQ resource!\n", __func__); ldev 138 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->irq = r->start; ldev 142 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c ldev->kpc_dma_dev = device_create(kpc_dma_class, &pldev->dev, dev, ldev, "kpc_dma%d", pldev->id); ldev 143 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c if (IS_ERR(ldev->kpc_dma_dev)) { ldev 144 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c dev_err(&ldev->pldev->dev, "%s: device_create failed: %d\n", __func__, rv); ldev 149 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c rv = setup_dma_engine(ldev, 30); ldev 151 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c dev_err(&ldev->pldev->dev, "%s: failed to setup_dma_engine: %d\n", __func__, rv); ldev 156 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c rv = sysfs_create_files(&(ldev->pldev->dev.kobj), ndd_attr_list); ldev 158 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c dev_err(&ldev->pldev->dev, "%s: Failed to add sysfs files: %d\n", __func__, rv); ldev 162 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c kpc_dma_add_device(ldev); ldev 167 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c destroy_dma_engine(ldev); ldev 171 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c kfree(ldev); ldev 179 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c struct kpc_dma_device *ldev = platform_get_drvdata(pldev); ldev 181 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c if (!ldev) ldev 184 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c lock_engine(ldev); ldev 185 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c sysfs_remove_files(&(ldev->pldev->dev.kobj), ndd_attr_list); ldev 186 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c destroy_dma_engine(ldev); ldev 187 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c kpc_dma_del_device(ldev); ldev 188 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c device_destroy(kpc_dma_class, MKDEV(assigned_major_num, ldev->pldev->id)); ldev 189 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.c kfree(ldev); ldev 50 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h struct kpc_dma_device *ldev; ldev 86 drivers/staging/kpc2000/kpc_dma/kpc_dma_driver.h struct kpc_dma_device *ldev; ldev 178 drivers/video/backlight/locomolcd.c static int locomolcd_probe(struct locomo_dev *ldev) ldev 184 drivers/video/backlight/locomolcd.c locomolcd_dev = ldev; ldev 186 drivers/video/backlight/locomolcd.c locomo_gpio_set_dir(ldev->dev.parent, LOCOMO_GPIO_FL_VR, 0); ldev 202 drivers/video/backlight/locomolcd.c &ldev->dev, NULL, ldev 331 fs/squashfs/inode.c struct squashfs_ldev_inode *sqsh_ino = &squashfs_ino.ldev; ldev 398 fs/squashfs/squashfs_fs.h struct squashfs_ldev_inode ldev; ldev 30 include/linux/leds-pca9532.h struct led_classdev ldev; ldev 4293 include/linux/netdevice.h #define netdev_for_each_lower_dev(dev, ldev, iter) \ ldev 4295 include/linux/netdevice.h ldev = netdev_lower_get_next(dev, &(iter)); \ ldev 4296 include/linux/netdevice.h ldev; \ ldev 4297 include/linux/netdevice.h ldev = netdev_lower_get_next(dev, &(iter))) ldev 169 net/6lowpan/debugfs.c struct lowpan_dev *ldev = lowpan_dev(dev); ldev 179 net/6lowpan/debugfs.c 
debugfs_create_file("active", 0644, root, &ldev->ctx.table[id], ldev 182 net/6lowpan/debugfs.c debugfs_create_file("compression", 0644, root, &ldev->ctx.table[id], ldev 185 net/6lowpan/debugfs.c debugfs_create_file("prefix", 0644, root, &ldev->ctx.table[id], ldev 188 net/6lowpan/debugfs.c debugfs_create_file("prefix_len", 0644, root, &ldev->ctx.table[id], ldev 230 net/6lowpan/debugfs.c struct lowpan_dev *ldev) ldev 237 net/6lowpan/debugfs.c root = debugfs_create_dir("ieee802154", ldev->iface_debugfs); ldev 246 net/6lowpan/debugfs.c struct lowpan_dev *ldev = lowpan_dev(dev); ldev 251 net/6lowpan/debugfs.c ldev->iface_debugfs = debugfs_create_dir(dev->name, lowpan_debugfs); ldev 253 net/6lowpan/debugfs.c contexts = debugfs_create_dir("contexts", ldev->iface_debugfs); ldev 261 net/6lowpan/debugfs.c lowpan_dev_debugfs_802154_init(dev, ldev); ldev 6838 net/core/dev.c struct net_device *ldev, *next, *now, *dev_stack[MAX_NEST_DEV + 1]; ldev 6854 net/core/dev.c ldev = netdev_next_lower_dev(now, &iter); ldev 6855 net/core/dev.c if (!ldev) ldev 6858 net/core/dev.c next = ldev; ldev 6859 net/core/dev.c niter = &ldev->adj_list.lower; ldev 6885 net/core/dev.c struct net_device *ldev, *next, *now, *dev_stack[MAX_NEST_DEV + 1]; ldev 6902 net/core/dev.c ldev = __netdev_next_lower_dev(now, &iter, &ignore); ldev 6903 net/core/dev.c if (!ldev) ldev 6908 net/core/dev.c next = ldev; ldev 6909 net/core/dev.c niter = &ldev->adj_list.lower; ldev 6966 net/core/dev.c struct net_device *ldev; ldev 6972 net/core/dev.c ldev = __netdev_next_lower_dev(dev, &iter, &ignore); ldev 6973 net/core/dev.c ldev; ldev 6974 net/core/dev.c ldev = __netdev_next_lower_dev(dev, &iter, &ignore)) { ldev 6977 net/core/dev.c if (max_depth < ldev->lower_level) ldev 6978 net/core/dev.c max_depth = ldev->lower_level; ldev 7001 net/core/dev.c struct net_device *ldev, *next, *now, *dev_stack[MAX_NEST_DEV + 1]; ldev 7017 net/core/dev.c ldev = netdev_next_lower_dev_rcu(now, &iter); ldev 7018 net/core/dev.c if (!ldev) ldev 7021 net/core/dev.c next = ldev; ldev 7022 net/core/dev.c niter = &ldev->adj_list.lower; ldev 722 net/decnet/af_decnet.c struct net_device *dev, *ldev; ldev 747 net/decnet/af_decnet.c ldev = NULL; ldev 752 net/decnet/af_decnet.c ldev = dev; ldev 757 net/decnet/af_decnet.c if (ldev == NULL) ldev 99 net/ieee802154/6lowpan/core.c static void lowpan_setup(struct net_device *ldev) ldev 101 net/ieee802154/6lowpan/core.c memset(ldev->broadcast, 0xff, IEEE802154_ADDR_LEN); ldev 103 net/ieee802154/6lowpan/core.c ldev->hard_header_len = sizeof(struct ipv6hdr); ldev 104 net/ieee802154/6lowpan/core.c ldev->flags = IFF_BROADCAST | IFF_MULTICAST; ldev 105 net/ieee802154/6lowpan/core.c ldev->priv_flags |= IFF_NO_QUEUE; ldev 107 net/ieee802154/6lowpan/core.c ldev->netdev_ops = &lowpan_netdev_ops; ldev 108 net/ieee802154/6lowpan/core.c ldev->header_ops = &lowpan_header_ops; ldev 109 net/ieee802154/6lowpan/core.c ldev->needs_free_netdev = true; ldev 110 net/ieee802154/6lowpan/core.c ldev->features |= NETIF_F_NETNS_LOCAL; ldev 123 net/ieee802154/6lowpan/core.c static int lowpan_newlink(struct net *src_net, struct net_device *ldev, ldev 137 net/ieee802154/6lowpan/core.c wdev = dev_get_by_index(dev_net(ldev), nla_get_u32(tb[IFLA_LINK])); ldev 150 net/ieee802154/6lowpan/core.c lowpan_802154_dev(ldev)->wdev = wdev; ldev 152 net/ieee802154/6lowpan/core.c memcpy(ldev->dev_addr, wdev->dev_addr, IEEE802154_ADDR_LEN); ldev 159 net/ieee802154/6lowpan/core.c ldev->needed_headroom = LOWPAN_IPHC_MAX_HEADER_LEN + ldev 161 net/ieee802154/6lowpan/core.c 
ldev->needed_tailroom = wdev->needed_tailroom; ldev 163 net/ieee802154/6lowpan/core.c ldev->neigh_priv_len = sizeof(struct lowpan_802154_neigh); ldev 165 net/ieee802154/6lowpan/core.c ret = lowpan_register_netdevice(ldev, LOWPAN_LLTYPE_IEEE802154); ldev 171 net/ieee802154/6lowpan/core.c wdev->ieee802154_ptr->lowpan_dev = ldev; ldev 175 net/ieee802154/6lowpan/core.c static void lowpan_dellink(struct net_device *ldev, struct list_head *head) ldev 177 net/ieee802154/6lowpan/core.c struct net_device *wdev = lowpan_802154_dev(ldev)->wdev; ldev 182 net/ieee802154/6lowpan/core.c lowpan_unregister_netdevice(ldev); ldev 34 net/ieee802154/6lowpan/reassembly.c struct sk_buff *prev, struct net_device *ldev); ldev 88 net/ieee802154/6lowpan/reassembly.c struct net_device *ldev; ldev 122 net/ieee802154/6lowpan/reassembly.c ldev = skb->dev; ldev 123 net/ieee802154/6lowpan/reassembly.c if (ldev) ldev 145 net/ieee802154/6lowpan/reassembly.c res = lowpan_frag_reasm(fq, skb, prev_tail, ldev); ldev 164 net/ieee802154/6lowpan/reassembly.c struct sk_buff *prev_tail, struct net_device *ldev) ldev 175 net/ieee802154/6lowpan/reassembly.c skb->dev = ldev; ldev 274 net/ieee802154/6lowpan/rx.c struct net_device *ldev; ldev 281 net/ieee802154/6lowpan/rx.c ldev = wdev->ieee802154_ptr->lowpan_dev; ldev 282 net/ieee802154/6lowpan/rx.c if (!ldev || !netif_running(ldev)) ldev 289 net/ieee802154/6lowpan/rx.c skb->dev = ldev; ldev 33 net/ieee802154/6lowpan/tx.c int lowpan_header_create(struct sk_buff *skb, struct net_device *ldev, ldev 37 net/ieee802154/6lowpan/tx.c struct wpan_dev *wpan_dev = lowpan_802154_dev(ldev)->wdev->ieee802154_ptr; ldev 56 net/ieee802154/6lowpan/tx.c if (!memcmp(daddr, ldev->broadcast, EUI64_ADDR_LEN)) { ldev 62 net/ieee802154/6lowpan/tx.c n = neigh_lookup(&nd_tbl, &hdr->daddr, ldev); ldev 158 net/ieee802154/6lowpan/tx.c lowpan_xmit_fragmented(struct sk_buff *skb, struct net_device *ldev, ldev 167 net/ieee802154/6lowpan/tx.c frag_tag = htons(lowpan_802154_dev(ldev)->fragment_tag); ldev 168 net/ieee802154/6lowpan/tx.c lowpan_802154_dev(ldev)->fragment_tag++; ldev 214 net/ieee802154/6lowpan/tx.c ldev->stats.tx_packets++; ldev 215 net/ieee802154/6lowpan/tx.c ldev->stats.tx_bytes += dgram_size; ldev 224 net/ieee802154/6lowpan/tx.c static int lowpan_header(struct sk_buff *skb, struct net_device *ldev, ldev 227 net/ieee802154/6lowpan/tx.c struct wpan_dev *wpan_dev = lowpan_802154_dev(ldev)->wdev->ieee802154_ptr; ldev 234 net/ieee802154/6lowpan/tx.c lowpan_header_compress(skb, ldev, &info.daddr, &info.saddr); ldev 246 net/ieee802154/6lowpan/tx.c return wpan_dev_hard_header(skb, lowpan_802154_dev(ldev)->wdev, ldev 250 net/ieee802154/6lowpan/tx.c netdev_tx_t lowpan_xmit(struct sk_buff *skb, struct net_device *ldev) ldev 263 net/ieee802154/6lowpan/tx.c if (unlikely(skb_headroom(skb) < ldev->needed_headroom || ldev 264 net/ieee802154/6lowpan/tx.c skb_tailroom(skb) < ldev->needed_tailroom)) { ldev 267 net/ieee802154/6lowpan/tx.c nskb = skb_copy_expand(skb, ldev->needed_headroom, ldev 268 net/ieee802154/6lowpan/tx.c ldev->needed_tailroom, GFP_ATOMIC); ldev 282 net/ieee802154/6lowpan/tx.c ret = lowpan_header(skb, ldev, &dgram_size, &dgram_offset); ldev 296 net/ieee802154/6lowpan/tx.c skb->dev = lowpan_802154_dev(ldev)->wdev; ldev 297 net/ieee802154/6lowpan/tx.c ldev->stats.tx_packets++; ldev 298 net/ieee802154/6lowpan/tx.c ldev->stats.tx_bytes += dgram_size; ldev 304 net/ieee802154/6lowpan/tx.c rc = lowpan_xmit_fragmented(skb, ldev, &wpan_hdr, dgram_size, ldev 753 net/ipv6/ip6_tunnel.c struct net_device *ldev = 
NULL; ldev 756 net/ipv6/ip6_tunnel.c ldev = dev_get_by_index_rcu(net, p->link); ldev 759 net/ipv6/ip6_tunnel.c likely(ipv6_chk_addr_and_flags(net, laddr, ldev, false, ldev 762 net/ipv6/ip6_tunnel.c likely(!ipv6_chk_addr_and_flags(net, raddr, ldev, true, ldev 988 net/ipv6/ip6_tunnel.c struct net_device *ldev = NULL; ldev 992 net/ipv6/ip6_tunnel.c ldev = dev_get_by_index_rcu(net, p->link); ldev 994 net/ipv6/ip6_tunnel.c if (unlikely(!ipv6_chk_addr_and_flags(net, laddr, ldev, false, ldev 1000 net/ipv6/ip6_tunnel.c unlikely(ipv6_chk_addr_and_flags(net, raddr, ldev, ldev 675 sound/aoa/fabrics/layout.c struct layout_dev *ldev = snd_kcontrol_chip(kcontrol); ldev 679 sound/aoa/fabrics/layout.c ucontrol->value.integer.value[0] = ldev->switch_on_headphone; ldev 682 sound/aoa/fabrics/layout.c ucontrol->value.integer.value[0] = ldev->switch_on_lineout; ldev 693 sound/aoa/fabrics/layout.c struct layout_dev *ldev = snd_kcontrol_chip(kcontrol); ldev 697 sound/aoa/fabrics/layout.c ldev->switch_on_headphone = !!ucontrol->value.integer.value[0]; ldev 700 sound/aoa/fabrics/layout.c ldev->switch_on_lineout = !!ucontrol->value.integer.value[0]; ldev 731 sound/aoa/fabrics/layout.c struct layout_dev *ldev = snd_kcontrol_chip(kcontrol); ldev 736 sound/aoa/fabrics/layout.c v = ldev->gpio.methods->get_detect(&ldev->gpio, ldev 740 sound/aoa/fabrics/layout.c v = ldev->gpio.methods->get_detect(&ldev->gpio, ldev 769 sound/aoa/fabrics/layout.c struct layout_dev *ldev, ldev 780 sound/aoa/fabrics/layout.c ref = of_get_property(ldev->sound, propname, NULL); ldev 798 sound/aoa/fabrics/layout.c codec->soundbus_dev = ldev->sdev; ldev 799 sound/aoa/fabrics/layout.c codec->gpio = &ldev->gpio; ldev 820 sound/aoa/fabrics/layout.c struct layout_dev *ldev; ldev 823 sound/aoa/fabrics/layout.c list_for_each_entry(ldev, &layouts_list, list) { ldev 825 sound/aoa/fabrics/layout.c if (!ldev->layout->codecs[i].name) ldev 827 sound/aoa/fabrics/layout.c if (strcmp(ldev->layout->codecs[i].name, codec->name) == 0) { ldev 829 sound/aoa/fabrics/layout.c ldev, ldev 830 sound/aoa/fabrics/layout.c &ldev->layout->codecs[i]) == 0) ldev 853 sound/aoa/fabrics/layout.c struct layout_dev *ldev; ldev 858 sound/aoa/fabrics/layout.c ldev = dptr->ptr; ldev 859 sound/aoa/fabrics/layout.c if (data == &ldev->selfptr_headphone) { ldev 860 sound/aoa/fabrics/layout.c v = ldev->gpio.methods->get_detect(&ldev->gpio, AOA_NOTIFY_HEADPHONE); ldev 861 sound/aoa/fabrics/layout.c detected = ldev->headphone_detected_ctrl; ldev 862 sound/aoa/fabrics/layout.c update = ldev->switch_on_headphone; ldev 864 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_speakers(&ldev->gpio, !v); ldev 865 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_headphone(&ldev->gpio, v); ldev 866 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_lineout(&ldev->gpio, 0); ldev 868 sound/aoa/fabrics/layout.c } else if (data == &ldev->selfptr_lineout) { ldev 869 sound/aoa/fabrics/layout.c v = ldev->gpio.methods->get_detect(&ldev->gpio, AOA_NOTIFY_LINE_OUT); ldev 870 sound/aoa/fabrics/layout.c detected = ldev->lineout_detected_ctrl; ldev 871 sound/aoa/fabrics/layout.c update = ldev->switch_on_lineout; ldev 873 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_speakers(&ldev->gpio, !v); ldev 874 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_headphone(&ldev->gpio, 0); ldev 875 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_lineout(&ldev->gpio, v); ldev 883 sound/aoa/fabrics/layout.c c = ldev->headphone_ctrl; ldev 886 sound/aoa/fabrics/layout.c c = ldev->speaker_ctrl; ldev 889 
sound/aoa/fabrics/layout.c c = ldev->lineout_ctrl; ldev 900 sound/aoa/fabrics/layout.c struct layout_dev *ldev = layout_device; ldev 913 sound/aoa/fabrics/layout.c ldev->master_ctrl = ctl; ldev 919 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_speakers(codec->gpio, 1); ldev 921 sound/aoa/fabrics/layout.c ldev->speaker_ctrl = ctl; ldev 926 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_headphone(codec->gpio, 1); ldev 928 sound/aoa/fabrics/layout.c ldev->headphone_ctrl = ctl; ldev 930 sound/aoa/fabrics/layout.c ldev->have_headphone_detect = ldev 931 sound/aoa/fabrics/layout.c !ldev->gpio.methods ldev 932 sound/aoa/fabrics/layout.c ->set_notify(&ldev->gpio, ldev 935 sound/aoa/fabrics/layout.c &ldev->selfptr_headphone); ldev 936 sound/aoa/fabrics/layout.c if (ldev->have_headphone_detect) { ldev 938 sound/aoa/fabrics/layout.c ldev); ldev 941 sound/aoa/fabrics/layout.c ldev); ldev 942 sound/aoa/fabrics/layout.c ldev->headphone_detected_ctrl = ctl; ldev 948 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_lineout(codec->gpio, 1); ldev 953 sound/aoa/fabrics/layout.c ldev->lineout_ctrl = ctl; ldev 955 sound/aoa/fabrics/layout.c ldev->have_lineout_detect = ldev 956 sound/aoa/fabrics/layout.c !ldev->gpio.methods ldev 957 sound/aoa/fabrics/layout.c ->set_notify(&ldev->gpio, ldev 960 sound/aoa/fabrics/layout.c &ldev->selfptr_lineout); ldev 961 sound/aoa/fabrics/layout.c if (ldev->have_lineout_detect) { ldev 963 sound/aoa/fabrics/layout.c ldev); ldev 970 sound/aoa/fabrics/layout.c ldev); ldev 975 sound/aoa/fabrics/layout.c ldev->lineout_detected_ctrl = ctl; ldev 982 sound/aoa/fabrics/layout.c if (ldev->have_headphone_detect) ldev 983 sound/aoa/fabrics/layout.c layout_notify(&ldev->selfptr_headphone); ldev 984 sound/aoa/fabrics/layout.c if (ldev->have_lineout_detect) ldev 985 sound/aoa/fabrics/layout.c layout_notify(&ldev->selfptr_lineout); ldev 1001 sound/aoa/fabrics/layout.c struct layout_dev *ldev = NULL; ldev 1030 sound/aoa/fabrics/layout.c ldev = kzalloc(sizeof(struct layout_dev), GFP_KERNEL); ldev 1031 sound/aoa/fabrics/layout.c if (!ldev) ldev 1034 sound/aoa/fabrics/layout.c layout_device = ldev; ldev 1035 sound/aoa/fabrics/layout.c ldev->sdev = sdev; ldev 1036 sound/aoa/fabrics/layout.c ldev->sound = sound; ldev 1037 sound/aoa/fabrics/layout.c ldev->layout = layout; ldev 1038 sound/aoa/fabrics/layout.c ldev->gpio.node = sound->parent; ldev 1044 sound/aoa/fabrics/layout.c ldev->gpio.methods = ftr_gpio_methods; ldev 1049 sound/aoa/fabrics/layout.c ldev->gpio.methods = pmf_gpio_methods; ldev 1053 sound/aoa/fabrics/layout.c ldev->selfptr_headphone.ptr = ldev; ldev 1054 sound/aoa/fabrics/layout.c ldev->selfptr_lineout.ptr = ldev; ldev 1055 sound/aoa/fabrics/layout.c dev_set_drvdata(&sdev->ofdev.dev, ldev); ldev 1056 sound/aoa/fabrics/layout.c list_add(&ldev->list, &layouts_list); ldev 1062 sound/aoa/fabrics/layout.c sdev->pcmid = ldev->layout->pcmid; ldev 1063 sound/aoa/fabrics/layout.c if (ldev->layout->busname) { ldev 1064 sound/aoa/fabrics/layout.c sdev->pcmname = ldev->layout->busname; ldev 1069 sound/aoa/fabrics/layout.c ldev->gpio.methods->init(&ldev->gpio); ldev 1079 sound/aoa/fabrics/layout.c ldev->switch_on_headphone = 1; ldev 1080 sound/aoa/fabrics/layout.c ldev->switch_on_lineout = 1; ldev 1084 sound/aoa/fabrics/layout.c ldev->gpio.methods->exit(&ldev->gpio); ldev 1088 sound/aoa/fabrics/layout.c list_del(&ldev->list); ldev 1090 sound/aoa/fabrics/layout.c kfree(ldev); ldev 1099 sound/aoa/fabrics/layout.c struct layout_dev *ldev = dev_get_drvdata(&sdev->ofdev.dev); ldev 1103 
sound/aoa/fabrics/layout.c if (ldev->codecs[i]) { ldev 1104 sound/aoa/fabrics/layout.c aoa_fabric_unlink_codec(ldev->codecs[i]); ldev 1106 sound/aoa/fabrics/layout.c ldev->codecs[i] = NULL; ldev 1108 sound/aoa/fabrics/layout.c list_del(&ldev->list); ldev 1110 sound/aoa/fabrics/layout.c of_node_put(ldev->sound); ldev 1112 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_notify(&ldev->gpio, ldev 1116 sound/aoa/fabrics/layout.c ldev->gpio.methods->set_notify(&ldev->gpio, ldev 1121 sound/aoa/fabrics/layout.c ldev->gpio.methods->exit(&ldev->gpio); ldev 1123 sound/aoa/fabrics/layout.c kfree(ldev); ldev 1132 sound/aoa/fabrics/layout.c struct layout_dev *ldev = dev_get_drvdata(dev); ldev 1134 sound/aoa/fabrics/layout.c if (ldev->gpio.methods && ldev->gpio.methods->all_amps_off) ldev 1135 sound/aoa/fabrics/layout.c ldev->gpio.methods->all_amps_off(&ldev->gpio); ldev 1142 sound/aoa/fabrics/layout.c struct layout_dev *ldev = dev_get_drvdata(dev); ldev 1144 sound/aoa/fabrics/layout.c if (ldev->gpio.methods && ldev->gpio.methods->all_amps_restore) ldev 1145 sound/aoa/fabrics/layout.c ldev->gpio.methods->all_amps_restore(&ldev->gpio);
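The LED-driver entries above (leds-cr0014114.c, leds-dac124s085.c, leds-lp3944.c, leds-pca9532.c, leds-sc27xx-bltc.c, leds-spi-byte.c, leds-tlc591xx.c) all use the same idiom for this identifier: a driver-private structure embeds a struct led_classdev member named ldev, the brightness callback recovers the containing structure with container_of(), and the classdev is registered against the parent device. The following is a minimal sketch of that idiom, not code taken from any of the listed drivers; the demo_led names and the demo_led_probe() hook are invented for illustration only.

/*
 * Minimal, hypothetical sketch of the "embedded led_classdev named ldev"
 * pattern visible in the LED entries above. Not from any listed driver.
 */
#include <linux/leds.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

struct demo_led {
	struct led_classdev ldev;	/* embedded classdev, as in the listed drivers */
	u8 cached_level;		/* hypothetical cached hardware state */
};

static int demo_led_set(struct led_classdev *ldev, enum led_brightness value)
{
	/* Recover the private structure from the embedded member. */
	struct demo_led *led = container_of(ldev, struct demo_led, ldev);

	led->cached_level = value;	/* a real driver would program hardware here */
	return 0;
}

static int demo_led_probe(struct platform_device *pdev)
{
	struct demo_led *led;

	led = devm_kzalloc(&pdev->dev, sizeof(*led), GFP_KERNEL);
	if (!led)
		return -ENOMEM;

	led->ldev.name = "demo::status";
	led->ldev.max_brightness = 1;
	led->ldev.brightness_set_blocking = demo_led_set;

	/* devm_ registration is unwound automatically on driver removal. */
	return devm_led_classdev_register(&pdev->dev, &led->ldev);
}

The same container_of() recovery appears outside the LED subsystem as well, for example in the sc27xx to_sc27xx_led() macro and the lp3944/pca9532 ldev_to_led() macros quoted in the listing.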