workq 39 drivers/crypto/ccree/cc_request_mgr.c struct workqueue_struct *workq;
workq 104 drivers/crypto/ccree/cc_request_mgr.c flush_workqueue(req_mgr_h->workq);
workq 105 drivers/crypto/ccree/cc_request_mgr.c destroy_workqueue(req_mgr_h->workq);
workq 134 drivers/crypto/ccree/cc_request_mgr.c req_mgr_h->workq = create_singlethread_workqueue("ccree");
workq 135 drivers/crypto/ccree/cc_request_mgr.c if (!req_mgr_h->workq) {
workq 555 drivers/crypto/ccree/cc_request_mgr.c queue_delayed_work(request_mgr_handle->workq,
workq 64 drivers/gpu/drm/msm/hdmi/hdmi.c if (hdmi->workq) {
workq 65 drivers/gpu/drm/msm/hdmi/hdmi.c flush_workqueue(hdmi->workq);
workq 66 drivers/gpu/drm/msm/hdmi/hdmi.c destroy_workqueue(hdmi->workq);
workq 241 drivers/gpu/drm/msm/hdmi/hdmi.c hdmi->workq = alloc_ordered_workqueue("msm_hdmi", 0);
workq 75 drivers/gpu/drm/msm/hdmi/hdmi.h struct workqueue_struct *workq;
workq 271 drivers/gpu/drm/msm/hdmi/hdmi_connector.c queue_work(hdmi->workq, &hdmi_connector->hpd_work);
workq 232 drivers/gpu/drm/msm/hdmi/hdmi_hdcp.c queue_work(hdmi->workq, &hdcp_ctrl->hdcp_reauth_work);
workq 452 drivers/gpu/drm/msm/hdmi/hdmi_hdcp.c queue_work(hdmi->workq, &hdcp_ctrl->hdcp_auth_work);
workq 552 drivers/gpu/drm/msm/hdmi/hdmi_hdcp.c queue_work(hdmi->workq, &hdcp_ctrl->hdcp_reauth_work);
workq 1326 drivers/gpu/drm/msm/hdmi/hdmi_hdcp.c queue_work(hdmi->workq, &hdcp_ctrl->hdcp_auth_work);
workq 94 drivers/gpu/drm/tiny/gm12u320.c struct workqueue_struct *workq;
workq 186 drivers/gpu/drm/tiny/gm12u320.c gm12u320->fb_update.workq = create_singlethread_workqueue(DRIVER_NAME);
workq 187 drivers/gpu/drm/tiny/gm12u320.c if (!gm12u320->fb_update.workq)
workq 197 drivers/gpu/drm/tiny/gm12u320.c if (gm12u320->fb_update.workq)
workq 198 drivers/gpu/drm/tiny/gm12u320.c destroy_workqueue(gm12u320->fb_update.workq);
workq 468 drivers/gpu/drm/tiny/gm12u320.c queue_work(gm12u320->fb_update.workq, &gm12u320->fb_update.work);
workq 105 drivers/hwmon/xgene-hwmon.c struct work_struct workq;
workq 442 drivers/hwmon/xgene-hwmon.c ctx = container_of(work, struct xgene_hwmon_dev, workq);
workq 522 drivers/hwmon/xgene-hwmon.c schedule_work(&ctx->workq);
workq 591 drivers/hwmon/xgene-hwmon.c schedule_work(&ctx->workq);
workq 637 drivers/hwmon/xgene-hwmon.c INIT_WORK(&ctx->workq, xgene_hwmon_evt_work);
workq 750 drivers/hwmon/xgene-hwmon.c schedule_work(&ctx->workq);
workq 381 drivers/iio/adc/at91-sama5d2_adc.c struct work_struct workq;
workq 1213 drivers/iio/adc/at91-sama5d2_adc.c schedule_work(&st->touch_st.workq);
workq 1244 drivers/iio/adc/at91-sama5d2_adc.c static void at91_adc_workq_handler(struct work_struct *workq)
workq 1246 drivers/iio/adc/at91-sama5d2_adc.c struct at91_adc_touch *touch_st = container_of(workq,
workq 1247 drivers/iio/adc/at91-sama5d2_adc.c struct at91_adc_touch, workq);
workq 1729 drivers/iio/adc/at91-sama5d2_adc.c INIT_WORK(&st->touch_st.workq, at91_adc_workq_handler);
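The ccree, msm_hdmi, and gm12u320 hits above share one lifecycle: a dedicated queue is allocated once at init time, work is queued to it from the event path, and the queue is flushed and destroyed on teardown (the xgene-hwmon and at91 ADC hits use the lighter schedule_work() variant shown later in the index). A minimal sketch of the dedicated-queue shape follows; my_dev, my_work_handler, and the other names are illustrative, not taken from any driver listed here.

#include <linux/errno.h>
#include <linux/workqueue.h>

struct my_dev {
        struct workqueue_struct *workq;
        struct delayed_work comp_work;
};

static void my_work_handler(struct work_struct *work)
{
        struct my_dev *dev = container_of(work, struct my_dev,
                                          comp_work.work);

        /* process completions for dev here */
}

static int my_dev_init(struct my_dev *dev)
{
        /* one dedicated worker context; items run strictly in order */
        dev->workq = create_singlethread_workqueue("my_dev");
        if (!dev->workq)
                return -ENOMEM;
        INIT_DELAYED_WORK(&dev->comp_work, my_work_handler);
        return 0;
}

static void my_dev_kick(struct my_dev *dev)
{
        /* safe from interrupt context; delay 0 means run as soon as possible */
        queue_delayed_work(dev->workq, &dev->comp_work, 0);
}

static void my_dev_fini(struct my_dev *dev)
{
        flush_workqueue(dev->workq);    /* let queued items finish ... */
        destroy_workqueue(dev->workq);  /* ... then free the queue */
}

Both create_singlethread_workqueue() and alloc_ordered_workqueue(name, 0) yield a queue that executes at most one item at a time, so handlers queued this way never race with themselves.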
workq 510 drivers/infiniband/hw/cxgb3/cxio_hal.c rdev_p->ctrl_qp.workq = dma_alloc_coherent(
workq 516 drivers/infiniband/hw/cxgb3/cxio_hal.c if (!rdev_p->ctrl_qp.workq) {
workq 550 drivers/infiniband/hw/cxgb3/cxio_hal.c &rdev_p->ctrl_qp.dma_addr, rdev_p->ctrl_qp.workq,
workq 563 drivers/infiniband/hw/cxgb3/cxio_hal.c * sizeof(union t3_wr), rdev_p->ctrl_qp.workq,
workq 604 drivers/infiniband/hw/cxgb3/cxio_hal.c wqe = (__be64 *)(rdev_p->ctrl_qp.workq + (rdev_p->ctrl_qp.wptr %
workq 646 drivers/infiniband/hw/cxgb3/cxio_hal.c wqe = (__be64 *)(rdev_p->ctrl_qp.workq + (rdev_p->ctrl_qp.wptr %
workq 72 drivers/infiniband/hw/cxgb3/cxio_hal.h union t3_wr *workq; /* the work request queue */
workq 105 drivers/infiniband/hw/cxgb3/iwch_cm.c static struct workqueue_struct *workq;
workq 2205 drivers/infiniband/hw/cxgb3/iwch_cm.c queue_work(workq, &skb_work);
workq 2247 drivers/infiniband/hw/cxgb3/iwch_cm.c workq = alloc_ordered_workqueue("iw_cxgb3", WQ_MEM_RECLAIM);
workq 2248 drivers/infiniband/hw/cxgb3/iwch_cm.c if (!workq)
workq 2256 drivers/infiniband/hw/cxgb3/iwch_cm.c flush_workqueue(workq);
workq 2257 drivers/infiniband/hw/cxgb3/iwch_cm.c destroy_workqueue(workq);
workq 138 drivers/infiniband/hw/cxgb4/cm.c static struct workqueue_struct *workq;
workq 4336 drivers/infiniband/hw/cxgb4/cm.c queue_work(workq, &skb_work);
workq 4354 drivers/infiniband/hw/cxgb4/cm.c queue_work(workq, &skb_work);
workq 4457 drivers/infiniband/hw/cxgb4/cm.c workq = alloc_ordered_workqueue("iw_cxgb4", WQ_MEM_RECLAIM);
workq 4458 drivers/infiniband/hw/cxgb4/cm.c if (!workq)
workq 4467 drivers/infiniband/hw/cxgb4/cm.c flush_workqueue(workq);
workq 4468 drivers/infiniband/hw/cxgb4/cm.c destroy_workqueue(workq);
workq 847 drivers/isdn/hardware/mISDN/avmfritz.c cancel_work_sync(&bch->workq);
workq 1397 drivers/isdn/hardware/mISDN/mISDNipac.c cancel_work_sync(&bch->workq);
workq 1581 drivers/isdn/hardware/mISDN/mISDNisar.c cancel_work_sync(&bch->workq);
workq 801 drivers/isdn/hardware/mISDN/netjet.c cancel_work_sync(&bch->workq);
workq 1036 drivers/isdn/hardware/mISDN/w6692.c cancel_work_sync(&bch->workq);
workq 185 drivers/isdn/mISDN/dsp.h struct work_struct workq;
workq 1585 drivers/isdn/mISDN/dsp_cmx.c schedule_work(&dsp->workq);
workq 1618 drivers/isdn/mISDN/dsp_cmx.c schedule_work(&dsp->workq);
workq 1940 drivers/isdn/mISDN/dsp_cmx.c schedule_work(&dsp->workq);
workq 1956 drivers/isdn/mISDN/dsp_cmx.c schedule_work(&member->dsp->workq);
workq 681 drivers/isdn/mISDN/dsp_core.c schedule_work(&dsp->workq);
workq 888 drivers/isdn/mISDN/dsp_core.c schedule_work(&dsp->workq);
workq 975 drivers/isdn/mISDN/dsp_core.c cancel_work_sync(&dsp->workq);
workq 1007 drivers/isdn/mISDN/dsp_core.c struct dsp *dsp = container_of(work, struct dsp, workq);
workq 1065 drivers/isdn/mISDN/dsp_core.c INIT_WORK(&ndsp->workq, (void *)dsp_send_bh);
workq 16 drivers/isdn/mISDN/hwchannel.c struct dchannel *dch = container_of(ws, struct dchannel, workq);
workq 39 drivers/isdn/mISDN/hwchannel.c struct bchannel *bch = container_of(ws, struct bchannel, workq);
workq 69 drivers/isdn/mISDN/hwchannel.c INIT_WORK(&ch->workq, dchannel_bh);
workq 92 drivers/isdn/mISDN/hwchannel.c INIT_WORK(&ch->workq, bchannel_bh);
workq 110 drivers/isdn/mISDN/hwchannel.c flush_work(&ch->workq);
workq 150 drivers/isdn/mISDN/hwchannel.c cancel_work_sync(&ch->workq);
workq 67 drivers/isdn/mISDN/l1oip.h struct work_struct workq;
workq 805 drivers/isdn/mISDN/l1oip_core.c struct l1oip *hc = container_of(work, struct l1oip, workq);
workq 824 drivers/isdn/mISDN/l1oip_core.c schedule_work(&hc->workq);
workq 1241 drivers/isdn/mISDN/l1oip_core.c cancel_work_sync(&hc->workq);
workq 1487 drivers/isdn/mISDN/l1oip_core.c INIT_WORK(&hc->workq, (void *)l1oip_send_bh);
workq 31 drivers/isdn/mISDN/stack.c wake_up_interruptible(&st->workq);
workq 280 drivers/isdn/mISDN/stack.c wait_event_interruptible(st->workq, (st->status &
workq 378 drivers/isdn/mISDN/stack.c init_waitqueue_head(&newst->workq);
workq 638 drivers/isdn/mISDN/stack.c wake_up_interruptible(&st->workq);
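Note that not every workq in this stretch is a workqueue: in cxio_hal.c/.h, ctrl_qp.workq is a DMA-coherent ring of hardware work requests (union t3_wr), and in mISDN's stack.c it is a wait_queue_head_t used with wait_event_interruptible()/wake_up_interruptible(). The iw_cxgb3 and iw_cxgb4 connection managers do show a real workqueue pattern: a single module-scope ordered queue created with WQ_MEM_RECLAIM. A sketch of that shape, with hypothetical names (my_cm, skb_handler):

#include <linux/module.h>
#include <linux/workqueue.h>

static struct workqueue_struct *workq;

static void skb_handler(struct work_struct *work)
{
        /* deferred connection-manager event processing would go here */
}

static DECLARE_WORK(skb_work, skb_handler);

/* called from the module's event path */
static void my_cm_event(void)
{
        queue_work(workq, &skb_work);
}

static int __init my_cm_init(void)
{
        /*
         * Ordered: items run one at a time, in queueing order.
         * WQ_MEM_RECLAIM: a rescuer thread guarantees forward progress
         * even under memory pressure.
         */
        workq = alloc_ordered_workqueue("my_cm", WQ_MEM_RECLAIM);
        if (!workq)
                return -ENOMEM;
        return 0;
}

static void __exit my_cm_exit(void)
{
        flush_workqueue(workq);
        destroy_workqueue(workq);
}

module_init(my_cm_init);
module_exit(my_cm_exit);
MODULE_LICENSE("GPL");

WQ_MEM_RECLAIM is the usual request for queues sitting on I/O paths, where stalling work indefinitely under memory pressure could deadlock reclaim; both connection managers above ask for it.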
workq 992 drivers/net/ethernet/chelsio/cxgb4/cxgb4.h struct workqueue_struct *workq;
workq 1377 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c queue_work(adap->workq, &adap->tid_release_task);
workq 2223 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c queue_work(adap->workq, &adap->db_full_task);
workq 2233 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c queue_work(adap->workq, &adap->db_drop_task);
workq 3474 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c queue_work(adap->workq, &adap->fatal_err_notify_task);
workq 5759 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c adapter->workq = create_singlethread_workqueue("cxgb4");
workq 5760 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c if (!adapter->workq) {
workq 6115 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c if (adapter->workq)
workq 6116 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c destroy_workqueue(adapter->workq);
workq 6153 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c destroy_workqueue(adapter->workq);
workq 66 drivers/net/ethernet/huawei/hinic/hinic_dev.h struct workqueue_struct *workq;
workq 397 drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.c queue_work(aeqs->workq, &aeq_work->work);
workq 800 drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.c aeqs->workq = create_singlethread_workqueue(HINIC_EQS_WQ_NAME);
workq 801 drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.c if (!aeqs->workq)
workq 822 drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.c destroy_workqueue(aeqs->workq);
workq 837 drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.c destroy_workqueue(aeqs->workq);
workq 211 drivers/net/ethernet/huawei/hinic/hinic_hw_eqs.h struct workqueue_struct *workq;
workq 755 drivers/net/ethernet/huawei/hinic/hinic_main.c queue_work(nic_dev->workq, &rx_mode_work->work);
workq 989 drivers/net/ethernet/huawei/hinic/hinic_main.c nic_dev->workq = create_singlethread_workqueue(HINIC_WQ_NAME);
workq 990 drivers/net/ethernet/huawei/hinic/hinic_main.c if (!nic_dev->workq) {
workq 1046 drivers/net/ethernet/huawei/hinic/hinic_main.c destroy_workqueue(nic_dev->workq);
workq 1133 drivers/net/ethernet/huawei/hinic/hinic_main.c destroy_workqueue(nic_dev->workq);
workq 140 drivers/net/ethernet/netronome/nfp/ccm_mbox.c queue_work(nn->mbox_cmsg.workq, &nn->mbox_cmsg.runq_work);
workq 671 drivers/net/ethernet/netronome/nfp/ccm_mbox.c queue_work(nn->mbox_cmsg.workq,
workq 723 drivers/net/ethernet/netronome/nfp/ccm_mbox.c drain_workqueue(nn->mbox_cmsg.workq);
workq 733 drivers/net/ethernet/netronome/nfp/ccm_mbox.c nn->mbox_cmsg.workq = alloc_workqueue("nfp-ccm-mbox", WQ_UNBOUND, 0);
workq 734 drivers/net/ethernet/netronome/nfp/ccm_mbox.c if (!nn->mbox_cmsg.workq)
workq 741 drivers/net/ethernet/netronome/nfp/ccm_mbox.c destroy_workqueue(nn->mbox_cmsg.workq);
workq 681 drivers/net/ethernet/netronome/nfp/nfp_net.h struct workqueue_struct *workq;
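The nfp ccm_mbox hits stand out in two ways: the queue is created with alloc_workqueue(..., WQ_UNBOUND, 0), so items may run on any CPU rather than the submitting one, and teardown uses drain_workqueue() instead of flush_workqueue(). Draining waits until the queue is truly empty, the pattern typically chosen when a work item can queue follow-up work; a plain flush only waits for items already queued at the moment of the call. A sketch of that pattern, with hypothetical names (struct mbox, runq_handler):

#include <linux/errno.h>
#include <linux/workqueue.h>

struct mbox {
        struct workqueue_struct *workq;
        struct work_struct runq_work;
        bool more_pending;      /* real code would guard this with a lock */
};

static void runq_handler(struct work_struct *work)
{
        struct mbox *mb = container_of(work, struct mbox, runq_work);

        /* process one batch; re-queue ourselves if the ring is not empty */
        if (mb->more_pending)
                queue_work(mb->workq, &mb->runq_work);
}

static int mbox_init(struct mbox *mb)
{
        /* unbound: no CPU locality, scheduler picks any available worker */
        mb->workq = alloc_workqueue("my-mbox", WQ_UNBOUND, 0);
        if (!mb->workq)
                return -ENOMEM;
        INIT_WORK(&mb->runq_work, runq_handler);
        return 0;
}

static void mbox_fini(struct mbox *mb)
{
        drain_workqueue(mb->workq);     /* waits out self-requeueing items */
        destroy_workqueue(mb->workq);
}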
workq 91 drivers/scsi/libiscsi.c if (ihost->workq)
workq 92 drivers/scsi/libiscsi.c queue_work(ihost->workq, &conn->xmitwork);
workq 741 drivers/scsi/libiscsi.c if (!ihost->workq) {
workq 1725 drivers/scsi/libiscsi.c if (!ihost->workq) {
workq 1907 drivers/scsi/libiscsi.c if (ihost->workq)
workq 1908 drivers/scsi/libiscsi.c flush_workqueue(ihost->workq);
workq 2630 drivers/scsi/libiscsi.c ihost->workq = create_singlethread_workqueue(ihost->workq_name);
workq 2631 drivers/scsi/libiscsi.c if (!ihost->workq)
workq 2675 drivers/scsi/libiscsi.c if (ihost->workq)
workq 2676 drivers/scsi/libiscsi.c destroy_workqueue(ihost->workq);
workq 1267 drivers/scsi/ufs/ufshcd.c queue_work(hba->clk_scaling.workq,
workq 1772 drivers/scsi/ufs/ufshcd.c hba->clk_scaling.workq = create_singlethread_workqueue(wq_name);
workq 1782 drivers/scsi/ufs/ufshcd.c destroy_workqueue(hba->clk_scaling.workq);
workq 1847 drivers/scsi/ufs/ufshcd.c queue_work(hba->clk_scaling.workq,
workq 401 drivers/scsi/ufs/ufshcd.h struct workqueue_struct *workq;
workq 75 include/linux/mISDNhw.h schedule_work(&((s)->workq)); \
workq 81 include/linux/mISDNhw.h struct work_struct workq;
workq 144 include/linux/mISDNhw.h struct work_struct workq;
workq 511 include/linux/mISDNif.h wait_queue_head_t workq;
workq 363 include/scsi/libiscsi.h struct workqueue_struct *workq;
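The header hits at the end show the lightest variant: mISDNhw.h embeds a bare work_struct in each channel and defers onto the shared system workqueue via a schedule_work() macro, with cancel_work_sync() at teardown; xgene-hwmon and the at91 ADC earlier in the index follow the same shape. (mISDNif.h line 511 is again a wait_queue_head_t, unrelated to workqueues despite the name.) A minimal sketch, with illustrative names:

#include <linux/workqueue.h>

struct my_ctx {
        struct work_struct workq;       /* a work item, despite the name */
        int pending_events;
};

static void my_evt_work(struct work_struct *work)
{
        /* recover the enclosing context from the embedded work item */
        struct my_ctx *ctx = container_of(work, struct my_ctx, workq);

        /* handle ctx->pending_events here */
}

static void my_ctx_init(struct my_ctx *ctx)
{
        INIT_WORK(&ctx->workq, my_evt_work);
}

static void my_ctx_event(struct my_ctx *ctx)
{
        schedule_work(&ctx->workq);     /* runs on the shared system_wq */
}

static void my_ctx_teardown(struct my_ctx *ctx)
{
        /* wait for a running handler and prevent any further runs */
        cancel_work_sync(&ctx->workq);
}

The tradeoff versus a dedicated queue is isolation: on system_wq the handler shares workers with the rest of the kernel, so long-running or reclaim-critical handlers should get their own queue instead.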