re_work          1543 drivers/infiniband/hw/bnxt_re/main.c 	struct bnxt_re_work *re_work;
re_work          1547 drivers/infiniband/hw/bnxt_re/main.c 	re_work = container_of(work, struct bnxt_re_work, work);
re_work          1548 drivers/infiniband/hw/bnxt_re/main.c 	rdev = re_work->rdev;
re_work          1550 drivers/infiniband/hw/bnxt_re/main.c 	if (re_work->event != NETDEV_REGISTER &&
re_work          1554 drivers/infiniband/hw/bnxt_re/main.c 	switch (re_work->event) {
re_work          1587 drivers/infiniband/hw/bnxt_re/main.c 	kfree(re_work);
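The hits above are the consumer side of a standard deferred-work pattern: bnxt_re_task() recovers the queued struct bnxt_re_work with container_of(), acts on the netdev event stored in it, and frees it. Below is a minimal sketch of that side, assuming only the struct layout implied by the hits; the field names mirror the driver, but the handler body is a placeholder, not the bnxt_re implementation.

	#include <linux/kernel.h>	/* container_of() */
	#include <linux/workqueue.h>
	#include <linux/netdevice.h>
	#include <linux/slab.h>

	struct bnxt_re_dev;			/* opaque for this sketch */

	/* Layout implied by the hits: the work_struct is embedded so
	 * container_of() can recover the wrapper from the work pointer. */
	struct bnxt_re_work {
		struct work_struct	work;
		unsigned long		event;		/* NETDEV_* value */
		struct bnxt_re_dev	*rdev;
		struct net_device	*vlan_dev;
	};

	static void bnxt_re_task(struct work_struct *work)
	{
		struct bnxt_re_work *re_work =
			container_of(work, struct bnxt_re_work, work);

		switch (re_work->event) {
		case NETDEV_UP:
		case NETDEV_DOWN:
			/* driver-specific handling of the deferred event */
			break;
		}
		kfree(re_work);		/* the handler owns and frees the item */
	}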
re_work          1613 drivers/infiniband/hw/bnxt_re/main.c 	struct bnxt_re_work *re_work;
re_work          1661 drivers/infiniband/hw/bnxt_re/main.c 		re_work = kzalloc(sizeof(*re_work), GFP_ATOMIC);
re_work          1662 drivers/infiniband/hw/bnxt_re/main.c 		if (re_work) {
re_work          1663 drivers/infiniband/hw/bnxt_re/main.c 			re_work->rdev = rdev;
re_work          1664 drivers/infiniband/hw/bnxt_re/main.c 			re_work->event = event;
re_work          1665 drivers/infiniband/hw/bnxt_re/main.c 			re_work->vlan_dev = (real_dev == netdev ?
re_work          1667 drivers/infiniband/hw/bnxt_re/main.c 			INIT_WORK(&re_work->work, bnxt_re_task);
re_work          1669 drivers/infiniband/hw/bnxt_re/main.c 			queue_work(bnxt_re_wq, &re_work->work);
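The remaining hits are the producer side in bnxt_re_netdev_event(): netdev notifiers run in atomic context, so the event is packaged into a GFP_ATOMIC allocation and deferred to the driver's workqueue instead of being handled in place. The sketch below keeps the same assumptions; the helper name bnxt_re_defer_event() is hypothetical (in main.c this logic sits inline in the notifier), and the struct is repeated from the sketch above so the fragment stands alone.

	#include <linux/workqueue.h>
	#include <linux/netdevice.h>
	#include <linux/slab.h>
	#include <linux/errno.h>

	struct bnxt_re_dev;

	struct bnxt_re_work {			/* repeated from the sketch above */
		struct work_struct	work;
		unsigned long		event;
		struct bnxt_re_dev	*rdev;
		struct net_device	*vlan_dev;
	};

	static struct workqueue_struct *bnxt_re_wq;	/* created at module init */
	static void bnxt_re_task(struct work_struct *work);

	/* Hypothetical helper; the notifier callback runs in atomic context. */
	static int bnxt_re_defer_event(struct bnxt_re_dev *rdev,
				       struct net_device *netdev,
				       struct net_device *real_dev,
				       unsigned long event)
	{
		struct bnxt_re_work *re_work;

		/* atomic context: a sleeping allocation is not allowed here */
		re_work = kzalloc(sizeof(*re_work), GFP_ATOMIC);
		if (!re_work)
			return -ENOMEM;

		re_work->rdev = rdev;
		re_work->event = event;
		/* record the VLAN upper device only when the event came in on it */
		re_work->vlan_dev = (real_dev == netdev ? NULL : netdev);
		INIT_WORK(&re_work->work, bnxt_re_task);
		queue_work(bnxt_re_wq, &re_work->work);
		return 0;
	}

queue_work() hands ownership of the allocation to bnxt_re_task(), which is why kfree() appears in the handler rather than in the notifier path.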