tmo 3131 drivers/acpi/nfit/core.c unsigned int tmo = acpi_desc->scrub_tmo;
tmo 3142 drivers/acpi/nfit/core.c return min(30U * 60U, tmo * 2);
tmo 3207 drivers/acpi/nfit/core.c static void __sched_ars(struct acpi_nfit_desc *acpi_desc, unsigned int tmo)
tmo 3213 drivers/acpi/nfit/core.c if (tmo)
tmo 3214 drivers/acpi/nfit/core.c acpi_desc->scrub_tmo = tmo;
tmo 3215 drivers/acpi/nfit/core.c queue_delayed_work(nfit_wq, &acpi_desc->dwork, tmo * HZ);
tmo 3236 drivers/acpi/nfit/core.c unsigned int tmo;
tmo 3242 drivers/acpi/nfit/core.c tmo = __acpi_nfit_scrub(acpi_desc, query_rc);
tmo 3243 drivers/acpi/nfit/core.c if (tmo)
tmo 3244 drivers/acpi/nfit/core.c __sched_ars(acpi_desc, tmo);
tmo 761 drivers/ata/ahci.c unsigned long tmo = deadline - jiffies;
tmo 798 drivers/ata/ahci.c deadline += tmo;
tmo 599 drivers/block/paride/pcd.c static int pcd_ready_wait(struct pcd_unit *cd, int tmo)
tmo 605 drivers/block/paride/pcd.c while (k < tmo) {
tmo 288 drivers/block/paride/pg.c static int pg_wait(struct pg *dev, int go, int stop, unsigned long tmo, char *msg)
tmo 296 drivers/block/paride/pg.c && time_before(jiffies, tmo)) {
tmo 303 drivers/block/paride/pg.c to = time_after_eq(jiffies, tmo);
tmo 320 drivers/block/paride/pg.c static int pg_command(struct pg *dev, char *cmd, int dlen, unsigned long tmo)
tmo 328 drivers/block/paride/pg.c if (pg_wait(dev, STAT_BUSY | STAT_DRQ, 0, tmo, "before command"))
tmo 335 drivers/block/paride/pg.c if (pg_wait(dev, STAT_BUSY, STAT_DRQ, tmo, "command DRQ"))
tmo 357 drivers/block/paride/pg.c static int pg_completion(struct pg *dev, char *buf, unsigned long tmo)
tmo 362 drivers/block/paride/pg.c tmo, "completion");
tmo 380 drivers/block/paride/pg.c tmo, "completion");
tmo 394 drivers/block/paride/pt.c static int pt_poll_dsc(struct pt_unit *tape, int pause, int tmo, char *msg)
tmo 402 drivers/block/paride/pt.c while (k < tmo) {
tmo 413 drivers/block/paride/pt.c if ((k >= tmo) || (s & STAT_ERR)) {
tmo 414 drivers/block/paride/pt.c if (k >= tmo)
tmo 425 drivers/block/paride/pt.c static void pt_media_access_cmd(struct pt_unit *tape, int tmo, char *cmd, char *fun)
tmo 432 drivers/block/paride/pt.c pt_poll_dsc(tape, HZ, tmo, fun);
tmo 484 drivers/block/paride/pt.c static int pt_ready_wait(struct pt_unit *tape, int tmo)
tmo 490 drivers/block/paride/pt.c while (k < tmo) {
tmo 259 drivers/dma/ioat/dma.c static int ioat_quiesce(struct ioatdma_chan *ioat_chan, unsigned long tmo)
tmo 261 drivers/dma/ioat/dma.c unsigned long end = jiffies + tmo;
tmo 269 drivers/dma/ioat/dma.c if (tmo && time_after(jiffies, end)) {
tmo 280 drivers/dma/ioat/dma.c static int ioat_reset_sync(struct ioatdma_chan *ioat_chan, unsigned long tmo)
tmo 282 drivers/dma/ioat/dma.c unsigned long end = jiffies + tmo;
tmo 313 drivers/dma/ioat/init.c unsigned long tmo;
tmo 372 drivers/dma/ioat/init.c tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
tmo 374 drivers/dma/ioat/init.c if (tmo == 0 ||
tmo 801 drivers/dma/ioat/init.c unsigned long tmo;
tmo 889 drivers/dma/ioat/init.c tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
tmo 891 drivers/dma/ioat/init.c if (tmo == 0 ||
tmo 957 drivers/dma/ioat/init.c tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
tmo 959 drivers/dma/ioat/init.c if (tmo == 0 ||
tmo 1010 drivers/dma/ioat/init.c tmo = wait_for_completion_timeout(&cmp, msecs_to_jiffies(3000));
tmo 1012 drivers/dma/ioat/init.c if (tmo == 0 ||
tmo 3396 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c long r = 1, tmo;
tmo 3399 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c tmo = msecs_to_jiffies(8000);
tmo 3401 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c tmo = msecs_to_jiffies(100);
tmo 3418 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c tmo = dma_fence_wait_timeout(fence, false, tmo);
tmo 3421 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c if (tmo == 0) {
tmo 3424 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c } else if (tmo < 0) {
tmo 3425 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c r = tmo;
tmo 3435 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c tmo = dma_fence_wait_timeout(fence, false, tmo);
tmo 3438 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c if (r < 0 || tmo <= 0) {
tmo 3439 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c DRM_ERROR("recover vram bo from shadow failed, r is %ld, tmo is %ld\n", r, tmo);
tmo 359 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c long tmo;
tmo 374 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c tmo = tmo_mm;
tmo 376 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c tmo = tmo_gfx;
tmo 378 drivers/gpu/drm/amd/amdgpu/amdgpu_ib.c r = amdgpu_ring_test_ib(ring, tmo);
tmo 174 drivers/infiniband/ulp/srp/ib_srp.c int tmo = *(int *)kp->arg;
tmo 176 drivers/infiniband/ulp/srp/ib_srp.c if (tmo >= 0)
tmo 177 drivers/infiniband/ulp/srp/ib_srp.c return sprintf(buffer, "%d", tmo);
tmo 184 drivers/infiniband/ulp/srp/ib_srp.c int tmo, res;
tmo 186 drivers/infiniband/ulp/srp/ib_srp.c res = srp_parse_tmo(&tmo, val);
tmo 191 drivers/infiniband/ulp/srp/ib_srp.c res = srp_tmo_valid(tmo, srp_fast_io_fail_tmo,
tmo 194 drivers/infiniband/ulp/srp/ib_srp.c res = srp_tmo_valid(srp_reconnect_delay, tmo, srp_dev_loss_tmo);
tmo 197 drivers/infiniband/ulp/srp/ib_srp.c tmo);
tmo 200 drivers/infiniband/ulp/srp/ib_srp.c *(int *)kp->arg = tmo;
tmo 626 drivers/input/keyboard/lm8323.c unsigned long tmo;
tmo 669 drivers/input/keyboard/lm8323.c tmo = jiffies + msecs_to_jiffies(100);
tmo 674 drivers/input/keyboard/lm8323.c if (time_after(jiffies, tmo)) {
tmo 175 drivers/md/dm-log-userspace-transfer.c unsigned long tmo;
tmo 240 drivers/md/dm-log-userspace-transfer.c tmo = wait_for_completion_timeout(&(pkg.complete), DM_ULOG_RETRY_TIMEOUT);
tmo 244 drivers/md/dm-log-userspace-transfer.c if (!tmo) {
tmo 1118 drivers/mmc/host/mtk-sd.c unsigned long tmo = jiffies + msecs_to_jiffies(20);
tmo 1121 drivers/mmc/host/mtk-sd.c time_before(jiffies, tmo))
tmo 1131 drivers/mmc/host/mtk-sd.c tmo = jiffies + msecs_to_jiffies(20);
tmo 1134 drivers/mmc/host/mtk-sd.c time_before(jiffies, tmo))
tmo 10162 drivers/net/ethernet/broadcom/bnxt/bnxt.c int n = 0, tmo;
tmo 10187 drivers/net/ethernet/broadcom/bnxt/bnxt.c tmo = HZ / 10;
tmo 10190 drivers/net/ethernet/broadcom/bnxt/bnxt.c tmo = bp->fw_reset_min_dsecs * HZ / 10;
tmo 10192 drivers/net/ethernet/broadcom/bnxt/bnxt.c bnxt_queue_fw_reset_work(bp, tmo);
tmo 10660 drivers/net/ethernet/broadcom/bnxt/bnxt.c int tmo;
tmo 10684 drivers/net/ethernet/broadcom/bnxt/bnxt.c tmo = HZ / 10;
tmo 10687 drivers/net/ethernet/broadcom/bnxt/bnxt.c tmo = bp->fw_reset_min_dsecs * HZ / 10;
tmo 10690 drivers/net/ethernet/broadcom/bnxt/bnxt.c bnxt_queue_fw_reset_work(bp, tmo);
tmo 256 drivers/net/ethernet/ti/cpsw_sl.c void cpsw_sl_reset(struct cpsw_sl *sl, unsigned long tmo)
tmo 258 drivers/net/ethernet/ti/cpsw_sl.c unsigned long timeout = jiffies + msecs_to_jiffies(tmo);
tmo 313 drivers/net/ethernet/ti/cpsw_sl.c int cpsw_sl_wait_for_idle(struct cpsw_sl *sl, unsigned long tmo)
tmo 315 drivers/net/ethernet/ti/cpsw_sl.c unsigned long timeout = jiffies + msecs_to_jiffies(tmo);
tmo 63 drivers/net/ethernet/ti/cpsw_sl.h void cpsw_sl_reset(struct cpsw_sl *sl, unsigned long tmo);
tmo 68 drivers/net/ethernet/ti/cpsw_sl.h int cpsw_sl_wait_for_idle(struct cpsw_sl *sl, unsigned long tmo);
tmo 26 drivers/net/ethernet/ti/cpts.c unsigned long tmo;
tmo 37 drivers/net/ethernet/ti/cpts.c return time_after(jiffies, event->tmo);
tmo 86 drivers/net/ethernet/ti/cpts.c if (time_after(jiffies, skb_cb->tmo)) {
tmo 128 drivers/net/ethernet/ti/cpts.c if (time_after(jiffies, skb_cb->tmo)) {
tmo 158 drivers/net/ethernet/ti/cpts.c event->tmo = jiffies + 2;
tmo 408 drivers/net/ethernet/ti/cpts.c skb_cb->tmo = jiffies + msecs_to_jiffies(100);
tmo 94 drivers/net/ethernet/ti/cpts.h unsigned long tmo;
tmo 67 drivers/net/phy/dp83640.c unsigned long tmo;
tmo 88 drivers/net/phy/dp83640.c unsigned long tmo;
tmo 276 drivers/net/phy/dp83640.c rxts->tmo = jiffies + SKB_TIMESTAMP_TIMEOUT;
tmo 586 drivers/net/phy/dp83640.c return time_after(jiffies, rxts->tmo);
tmo 927 drivers/net/phy/dp83640.c if (time_after(jiffies, skb_info->tmo)) {
tmo 1419 drivers/net/phy/dp83640.c if (!time_after(jiffies, skb_info->tmo)) {
tmo 1470 drivers/net/phy/dp83640.c skb_info->tmo = jiffies + SKB_TIMESTAMP_TIMEOUT;
tmo 1496 drivers/net/phy/dp83640.c skb_info->tmo = jiffies + SKB_TIMESTAMP_TIMEOUT;
tmo 112 drivers/net/wireless/st/cw1200/pm.c unsigned long tmo)
tmo 117 drivers/net/wireless/st/cw1200/pm.c if (!timer_pending(&pm->stay_awake) || cur_tmo < (long)tmo)
tmo 118 drivers/net/wireless/st/cw1200/pm.c mod_timer(&pm->stay_awake, jiffies + tmo);
tmo 125 drivers/net/wireless/st/cw1200/pm.c long tmo;
tmo 128 drivers/net/wireless/st/cw1200/pm.c tmo = work->timer.expires - jiffies;
tmo 129 drivers/net/wireless/st/cw1200/pm.c if (tmo < 0)
tmo 130 drivers/net/wireless/st/cw1200/pm.c tmo = 0;
tmo 132 drivers/net/wireless/st/cw1200/pm.c tmo = -1;
tmo 134 drivers/net/wireless/st/cw1200/pm.c return tmo;
tmo 139 drivers/net/wireless/st/cw1200/pm.c unsigned long tmo)
tmo 141 drivers/net/wireless/st/cw1200/pm.c if ((long)tmo < 0)
tmo 144 drivers/net/wireless/st/cw1200/pm.c return queue_delayed_work(priv->workqueue, work, tmo);
tmo 34 drivers/net/wireless/st/cw1200/pm.h unsigned long tmo);
tmo 37 drivers/net/wireless/st/cw1200/pm.h unsigned long tmo)
tmo 122 drivers/net/wireless/st/cw1200/queue.c unsigned long tmo = item->queue_timestamp + queue->ttl;
tmo 123 drivers/net/wireless/st/cw1200/queue.c mod_timer(&queue->gc, tmo);
tmo 125 drivers/net/wireless/st/cw1200/queue.c tmo - jiffies);
tmo 20 drivers/net/wireless/st/cw1200/scan.c int tmo = 2000;
tmo 34 drivers/net/wireless/st/cw1200/scan.c tmo += scan->ch[i].max_chan_time + 10;
tmo 39 drivers/net/wireless/st/cw1200/scan.c cw1200_pm_stay_awake(&priv->pm_state, msecs_to_jiffies(tmo));
tmo 41 drivers/net/wireless/st/cw1200/scan.c msecs_to_jiffies(tmo));
tmo 2084 drivers/net/wireless/st/cw1200/sta.c long tmo = priv->join_dtim_period *
tmo 2093 drivers/net/wireless/st/cw1200/sta.c mod_timer(&priv->mcast_timeout, jiffies + tmo);
tmo 83 drivers/net/wireless/st/cw1200/wsm.c void *arg, u16 cmd, long tmo);
tmo 1076 drivers/net/wireless/st/cw1200/wsm.c void *arg, u16 cmd, long tmo)
tmo 1127 drivers/net/wireless/st/cw1200/wsm.c priv->wsm_cmd.done, tmo);
tmo 420 drivers/nvdimm/security.c unsigned int tmo;
tmo 432 drivers/nvdimm/security.c tmo = nvdimm->sec.overwrite_tmo;
tmo 442 drivers/nvdimm/security.c tmo += 10;
tmo 443 drivers/nvdimm/security.c queue_delayed_work(system_wq, &nvdimm->dwork, tmo * HZ);
tmo 444 drivers/nvdimm/security.c nvdimm->sec.overwrite_tmo = min(15U * 60U, tmo);
tmo 702 drivers/rapidio/devices/rio_mport_cdev.c unsigned long tmo = msecs_to_jiffies(dma_timeout);
tmo 766 drivers/rapidio/devices/rio_mport_cdev.c wret = wait_for_completion_interruptible_timeout(&req->req_comp, tmo);
tmo 1015 drivers/rapidio/devices/rio_mport_cdev.c unsigned long tmo;
tmo 1027 drivers/rapidio/devices/rio_mport_cdev.c tmo = msecs_to_jiffies(w_param.timeout);
tmo 1029 drivers/rapidio/devices/rio_mport_cdev.c tmo = msecs_to_jiffies(dma_timeout);
tmo 1044 drivers/rapidio/devices/rio_mport_cdev.c wret = wait_for_completion_interruptible_timeout(&req->req_comp, tmo);
tmo 1947 drivers/rapidio/devices/rio_mport_cdev.c unsigned long tmo = msecs_to_jiffies(dma_timeout);
tmo 1981 drivers/rapidio/devices/rio_mport_cdev.c wret = wait_for_completion_interruptible_timeout(&priv->comp, tmo);
tmo 1436 drivers/rapidio/rio_cm.c unsigned long tmo = msecs_to_jiffies(3000);
tmo 1451 drivers/rapidio/rio_cm.c wret = wait_for_completion_interruptible_timeout(&ch->comp_close, tmo);
tmo 4120 drivers/scsi/csiostor/csio_hw.c io_req->tmo -= min_t(uint32_t, io_req->tmo, ECM_MIN_TMO);
tmo 4122 drivers/scsi/csiostor/csio_hw.c if (!io_req->tmo) {
tmo 1702 drivers/scsi/csiostor/csio_lnode.c wr->tmo_val = (uint8_t) io_req->tmo;
tmo 76 drivers/scsi/csiostor/csio_mb.c csio_mb_hello(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 82 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, 1);
tmo 140 drivers/scsi/csiostor/csio_mb.c csio_mb_bye(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 145 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, 1);
tmo 162 drivers/scsi/csiostor/csio_mb.c csio_mb_reset(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 168 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, 1);
tmo 193 drivers/scsi/csiostor/csio_mb.c csio_mb_params(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 203 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, 1);
tmo 264 drivers/scsi/csiostor/csio_mb.c csio_mb_ldst(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo, int reg)
tmo 267 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, ldst_cmd, tmo, hw, NULL, 1);
tmo 300 drivers/scsi/csiostor/csio_mb.c csio_mb_caps_config(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 307 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, wr ? 0 : 1);
tmo 342 drivers/scsi/csiostor/csio_mb.c csio_mb_port(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 348 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, 1);
tmo 413 drivers/scsi/csiostor/csio_mb.c csio_mb_initialize(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 418 drivers/scsi/csiostor/csio_mb.c CSIO_INIT_MBP(mbp, cmdp, tmo, hw, cbfn, 1);
tmo 1205 drivers/scsi/csiostor/csio_mb.c if (mbp->tmo < CSIO_MB_POLL_FREQ) {
tmo 1206 drivers/scsi/csiostor/csio_mb.c csio_err(hw, "Invalid tmo: 0x%x\n", mbp->tmo);
tmo 1279 drivers/scsi/csiostor/csio_mb.c mod_timer(&mbm->timer, jiffies + msecs_to_jiffies(mbp->tmo));
tmo 1298 drivers/scsi/csiostor/csio_mb.c for (ii = 0; ii < mbp->tmo; ii += CSIO_MB_POLL_FREQ) {
tmo 97 drivers/scsi/csiostor/csio_mb.h (__mbp)->tmo = (__tmo); \
tmo 121 drivers/scsi/csiostor/csio_mb.h uint32_t tmo; /* Timeout */
tmo 174 drivers/scsi/csiostor/csio_mb.h void csio_mb_ldst(struct csio_hw *hw, struct csio_mb *mbp, uint32_t tmo,
tmo 217 drivers/scsi/csiostor/csio_scsi.c wr->tmo_val = (uint8_t) req->tmo;
tmo 376 drivers/scsi/csiostor/csio_scsi.c wr->tmo_val = (uint8_t)(req->tmo);
tmo 429 drivers/scsi/csiostor/csio_scsi.c wr->tmo_val = (uint8_t)(req->tmo);
tmo 660 drivers/scsi/csiostor/csio_scsi.c wr->tmo_val = (uint8_t) req->tmo;
tmo 1233 drivers/scsi/csiostor/csio_scsi.c csio_scsi_abort_io_q(struct csio_scsim *scm, struct list_head *q, uint32_t tmo)
tmo 1237 drivers/scsi/csiostor/csio_scsi.c int count = DIV_ROUND_UP(tmo, CSIO_SCSI_ABORT_Q_POLL_MS);
tmo 1837 drivers/scsi/csiostor/csio_scsi.c ioreq->tmo = 0;
tmo 1891 drivers/scsi/csiostor/csio_scsi.c ioreq->tmo = CSIO_SCSI_ABRT_TMO_MS;
tmo 1916 drivers/scsi/csiostor/csio_scsi.c unsigned long tmo = 0;
tmo 1943 drivers/scsi/csiostor/csio_scsi.c tmo = CSIO_SCSI_ABRT_TMO_MS;
tmo 1965 drivers/scsi/csiostor/csio_scsi.c wait_for_completion_timeout(&ioreq->cmplobj, msecs_to_jiffies(tmo));
tmo 2127 drivers/scsi/csiostor/csio_scsi.c ioreq->tmo = CSIO_SCSI_LUNRST_TMO_MS / 1000;
tmo 2134 drivers/scsi/csiostor/csio_scsi.c count = DIV_ROUND_UP((ioreq->tmo + 10) * 1000, CSIO_SCSI_TM_POLL_MS);
tmo 246 drivers/scsi/csiostor/csio_wr.h uint32_t tmo; /* Driver timeout */
tmo 1050 drivers/scsi/isci/host.c unsigned long tmo = sci_controller_get_suggested_start_timeout(ihost);
tmo 1055 drivers/scsi/isci/host.c sci_controller_start(ihost, tmo);
tmo 394 drivers/scsi/libsas/sas_init.c const unsigned long tmo = msecs_to_jiffies(25000);
tmo 407 drivers/scsi/libsas/sas_init.c wait_event_timeout(ha->eh_wait_q, phys_suspended(ha) == 0, tmo);
tmo 321 drivers/scsi/lpfc/lpfc_ct.c uint32_t tmo, uint8_t retry)
tmo 360 drivers/scsi/lpfc/lpfc_ct.c if (!tmo) {
tmo 362 drivers/scsi/lpfc/lpfc_ct.c tmo = (3 * phba->fc_ratov);
tmo 364 drivers/scsi/lpfc/lpfc_ct.c icmd->ulpTimeout = tmo;
tmo 1276 drivers/scsi/lpfc/lpfc_els.c uint32_t tmo, did;
tmo 1335 drivers/scsi/lpfc/lpfc_els.c tmo = phba->fc_ratov;
tmo 1338 drivers/scsi/lpfc/lpfc_els.c phba->fc_ratov = tmo;
tmo 6468 drivers/scsi/lpfc/lpfc_els.c int i, tmo;
tmo 6577 drivers/scsi/lpfc/lpfc_els.c tmo = ((phba->fc_ratov * 3) + 3);
tmo 6579 drivers/scsi/lpfc/lpfc_els.c jiffies + msecs_to_jiffies(1000 * tmo));
tmo 4633 drivers/scsi/lpfc/lpfc_hbadisc.c uint32_t tmo;
tmo 4637 drivers/scsi/lpfc/lpfc_hbadisc.c tmo = (((phba->fc_edtov + 999) / 1000) + 1);
tmo 4642 drivers/scsi/lpfc/lpfc_hbadisc.c tmo = ((phba->fc_ratov * 3) + 3);
tmo 4649 drivers/scsi/lpfc/lpfc_hbadisc.c tmo, vport->port_state, vport->fc_flag);
tmo 4652 drivers/scsi/lpfc/lpfc_hbadisc.c mod_timer(&vport->fc_disctmo, jiffies + msecs_to_jiffies(1000 * tmo));
tmo 4661 drivers/scsi/lpfc/lpfc_hbadisc.c vport->port_state, tmo,
tmo 426 drivers/scsi/lpfc/lpfc_nvme.c uint32_t tmo, uint8_t retry)
tmo 452 drivers/scsi/lpfc/lpfc_nvme.c if (!tmo)
tmo 454 drivers/scsi/lpfc/lpfc_nvme.c tmo = (3 * phba->fc_ratov);
tmo 529 drivers/scsi/lpfc/lpfc_nvme.c genwqe->drvrTimeout = tmo + LPFC_DRVR_TIMEOUT;
tmo 1664 drivers/scsi/myrs.c mbox->SCSI_10.tmo.tmo_scale = MYRS_TMO_SCALE_MINUTES;
tmo 1665 drivers/scsi/myrs.c mbox->SCSI_10.tmo.tmo_val = timeout / 60;
tmo 1667 drivers/scsi/myrs.c mbox->SCSI_10.tmo.tmo_scale = MYRS_TMO_SCALE_SECONDS;
tmo 1668 drivers/scsi/myrs.c mbox->SCSI_10.tmo.tmo_val = timeout;
tmo 1711 drivers/scsi/myrs.c mbox->SCSI_255.tmo.tmo_scale = MYRS_TMO_SCALE_MINUTES;
tmo 1712 drivers/scsi/myrs.c mbox->SCSI_255.tmo.tmo_val = timeout / 60;
tmo 1714 drivers/scsi/myrs.c mbox->SCSI_255.tmo.tmo_scale = MYRS_TMO_SCALE_SECONDS;
tmo 1715 drivers/scsi/myrs.c mbox->SCSI_255.tmo.tmo_val = timeout;
tmo 695 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 708 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 721 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 737 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 751 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 765 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 780 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 798 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 813 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 832 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 851 drivers/scsi/myrs.h struct myrs_cmd_tmo tmo; /* Byte 19 */
tmo 2136 drivers/scsi/qedf/qedf_io.c int tmo = 0;
tmo 2215 drivers/scsi/qedf/qedf_io.c tmo = wait_for_completion_timeout(&io_req->cleanup_done,
tmo 2218 drivers/scsi/qedf/qedf_io.c if (!tmo) {
tmo 2277 drivers/scsi/qedf/qedf_io.c int tmo = 0;
tmo 2346 drivers/scsi/qedf/qedf_io.c tmo = wait_for_completion_timeout(&io_req->tm_done,
tmo 2349 drivers/scsi/qedf/qedf_io.c if (!tmo) {
tmo 290 drivers/scsi/qla2xxx/qla_gbl.h extern void qla2x00_init_timer(srb_t *sp, unsigned long tmo);
tmo 79 drivers/scsi/qla2xxx/qla_init.c unsigned long tmo;
tmo 83 drivers/scsi/qla2xxx/qla_init.c tmo = ha->r_a_tov / 10 * 2;
tmo 85 drivers/scsi/qla2xxx/qla_init.c tmo = FX00_DEF_RATOV * 2;
tmo 91 drivers/scsi/qla2xxx/qla_init.c tmo = ha->login_timeout;
tmo 93 drivers/scsi/qla2xxx/qla_init.c return tmo;
tmo 2508 drivers/scsi/qla2xxx/qla_iocb.c void qla2x00_init_timer(srb_t *sp, unsigned long tmo)
tmo 2511 drivers/scsi/qla2xxx/qla_iocb.c sp->u.iocb_cmd.timer.expires = jiffies + tmo * HZ;
tmo 300 drivers/scsi/qla2xxx/qla_mr.c qlafx00_driver_shutdown(scsi_qla_host_t *vha, int tmo)
tmo 312 drivers/scsi/qla2xxx/qla_mr.c if (tmo)
tmo 313 drivers/scsi/qla2xxx/qla_mr.c mcp->tov = tmo;
tmo 244 drivers/scsi/qla2xxx/qla_sup.c uint16_t data, uint32_t tmo)
tmo 283 drivers/scsi/qla2xxx/qla_sup.c if (!--tmo) {
tmo 7406 drivers/scsi/qla4xxx/ql4_os.c uint16_t tmo = 0;
tmo 7431 drivers/scsi/qla4xxx/ql4_os.c tmo = ((ha->def_timeout > LOGIN_TOV) &&
tmo 7436 drivers/scsi/qla4xxx/ql4_os.c "Default time to wait for login to ddb %d\n", tmo));
tmo 7438 drivers/scsi/qla4xxx/ql4_os.c wtime = jiffies + (HZ * tmo);
tmo 8467 drivers/scsi/qla4xxx/ql4_os.c uint16_t tmo = 0;
tmo 8494 drivers/scsi/qla4xxx/ql4_os.c tmo = ((ha->def_timeout > LOGIN_TOV) &&
tmo 8499 drivers/scsi/qla4xxx/ql4_os.c "Default time to wait for build ddb %d\n", tmo));
tmo 8501 drivers/scsi/qla4xxx/ql4_os.c wtime = jiffies + (HZ * tmo);
tmo 194 drivers/scsi/scsi_transport_srp.c static ssize_t srp_show_tmo(char *buf, int tmo)
tmo 196 drivers/scsi/scsi_transport_srp.c return tmo >= 0 ? sprintf(buf, "%d\n", tmo) : sprintf(buf, "off\n");
tmo 199 drivers/scsi/scsi_transport_srp.c int srp_parse_tmo(int *tmo, const char *buf)
tmo 204 drivers/scsi/scsi_transport_srp.c res = kstrtoint(buf, 0, tmo);
tmo 206 drivers/scsi/scsi_transport_srp.c *tmo = -1;
tmo 82 drivers/scsi/snic/snic_main.c int tmo = SNIC_DFLT_CMD_TIMEOUT * HZ;
tmo 90 drivers/scsi/snic/snic_main.c tmo = snic->fwinfo.io_tmo * HZ;
tmo 93 drivers/scsi/snic/snic_main.c blk_queue_rq_timeout(sdev->request_queue, tmo);
tmo 415 drivers/scsi/sun3_scsi.c int tmo = 20000; /* .2 sec */
tmo 421 drivers/scsi/sun3_scsi.c if(--tmo <= 0) {
tmo 1156 drivers/staging/media/allegro-dvt/allegro-core.c unsigned long tmo;
tmo 1158 drivers/staging/media/allegro-dvt/allegro-core.c tmo = wait_for_completion_timeout(&dev->init_complete,
tmo 1160 drivers/staging/media/allegro-dvt/allegro-core.c if (tmo == 0)
tmo 885 drivers/target/target_core_user.c static int tcmu_setup_cmd_timer(struct tcmu_cmd *tcmu_cmd, unsigned int tmo,
tmo 902 drivers/target/target_core_user.c udev->name, tmo / MSEC_PER_SEC);
tmo 905 drivers/target/target_core_user.c if (!tmo)
tmo 908 drivers/target/target_core_user.c tcmu_cmd->deadline = round_jiffies_up(jiffies + msecs_to_jiffies(tmo));
tmo 918 drivers/target/target_core_user.c unsigned int tmo;
tmo 928 drivers/target/target_core_user.c tmo = udev->qfull_time_out;
tmo 930 drivers/target/target_core_user.c tmo = udev->cmd_time_out;
tmo 932 drivers/target/target_core_user.c tmo = TCMU_TIME_OUT;
tmo 934 drivers/target/target_core_user.c ret = tcmu_setup_cmd_timer(tcmu_cmd, tmo, &udev->qfull_timer);
tmo 1109 drivers/usb/core/devio.c unsigned int tmo;
tmo 1132 drivers/usb/core/devio.c tmo = ctrl.timeout;
tmo 1145 drivers/usb/core/devio.c snoop_urb(dev, NULL, pipe, ctrl.wLength, tmo, SUBMIT, NULL, 0);
tmo 1150 drivers/usb/core/devio.c tbuf, ctrl.wLength, tmo);
tmo 1168 drivers/usb/core/devio.c snoop_urb(dev, NULL, pipe, ctrl.wLength, tmo, SUBMIT,
tmo 1174 drivers/usb/core/devio.c tbuf, ctrl.wLength, tmo);
tmo 1196 drivers/usb/core/devio.c unsigned int tmo, len1, pipe;
tmo 1226 drivers/usb/core/devio.c tmo = bulk.timeout;
tmo 1232 drivers/usb/core/devio.c snoop_urb(dev, NULL, pipe, len1, tmo, SUBMIT, NULL, 0);
tmo 1235 drivers/usb/core/devio.c i = usb_bulk_msg(dev, pipe, tbuf, len1, &len2, tmo);
tmo 1252 drivers/usb/core/devio.c snoop_urb(dev, NULL, pipe, len1, tmo, SUBMIT, tbuf, len1);
tmo 1255 drivers/usb/core/devio.c i = usb_bulk_msg(dev, pipe, tbuf, len1, &len2, tmo);
tmo 650 drivers/usb/typec/ucsi/ucsi.c unsigned long tmo;
tmo 660 drivers/usb/typec/ucsi/ucsi.c tmo = jiffies + msecs_to_jiffies(UCSI_TIMEOUT_MS);
tmo 686 drivers/usb/typec/ucsi/ucsi.c } while (time_is_after_jiffies(tmo));
tmo 284 drivers/video/fbdev/omap/lcd_mipid.c unsigned long tmo;
tmo 287 drivers/video/fbdev/omap/lcd_mipid.c tmo = jiffies + msecs_to_jiffies(100);
tmo 296 drivers/video/fbdev/omap/lcd_mipid.c if (time_after(jiffies, tmo)) {
tmo 116 include/scsi/scsi_transport_srp.h int srp_parse_tmo(int *tmo, const char *buf);
tmo 390 net/decnet/dn_route.c long tmo = (long)(dn_rt_deadline - now);
tmo 392 net/decnet/dn_route.c if (user_mode && tmo < dn_rt_max_delay - dn_rt_min_delay)
tmo 393 net/decnet/dn_route.c tmo = 0;
tmo 395 net/decnet/dn_route.c if (delay > tmo)
tmo 396 net/decnet/dn_route.c delay = tmo;
tmo 331 net/ipv4/inet_diag.c long tmo;
tmo 341 net/ipv4/inet_diag.c tmo = tw->tw_timer.expires - jiffies;
tmo 342 net/ipv4/inet_diag.c if (tmo < 0)
tmo 343 net/ipv4/inet_diag.c tmo = 0;
tmo 350 net/ipv4/inet_diag.c r->idiag_expires = jiffies_to_msecs(tmo);
tmo 367 net/ipv4/inet_diag.c long tmo;
tmo 383 net/ipv4/inet_diag.c tmo = inet_reqsk(sk)->rsk_timer.expires - jiffies;
tmo 384 net/ipv4/inet_diag.c r->idiag_expires = (tmo >= 0) ? jiffies_to_msecs(tmo) : 0;
tmo 2474 net/ipv4/tcp.c const int tmo = tcp_fin_time(sk);
tmo 2476 net/ipv4/tcp.c if (tmo > TCP_TIMEWAIT_LEN) {
tmo 2478 net/ipv4/tcp.c tmo - TCP_TIMEWAIT_LEN);
tmo 2480 net/ipv4/tcp.c tcp_time_wait(sk, TCP_FIN_WAIT2, tmo);
tmo 6262 net/ipv4/tcp_input.c int tmo;
tmo 6296 net/ipv4/tcp_input.c tmo = tcp_fin_time(sk);
tmo 6297 net/ipv4/tcp_input.c if (tmo > TCP_TIMEWAIT_LEN) {
tmo 6298 net/ipv4/tcp_input.c inet_csk_reset_keepalive_timer(sk, tmo - TCP_TIMEWAIT_LEN);
tmo 6306 net/ipv4/tcp_input.c inet_csk_reset_keepalive_timer(sk, tmo);
tmo 6308 net/ipv4/tcp_input.c tcp_time_wait(sk, TCP_FIN_WAIT2, tmo);
tmo 673 net/ipv4/tcp_timer.c const int tmo = tcp_fin_time(sk) - TCP_TIMEWAIT_LEN;
tmo 675 net/ipv4/tcp_timer.c if (tmo > 0) {
tmo 676 net/ipv4/tcp_timer.c tcp_time_wait(sk, TCP_FIN_WAIT2, tmo);
tmo 216 net/ipv6/icmp.c int tmo = net->ipv6.sysctl.icmpv6_time;
tmo 221 net/ipv6/icmp.c tmo >>= ((128 - rt->rt6i_dst.plen)>>5);
tmo 224 net/ipv6/icmp.c res = inet_peer_xrlim_allow(peer, tmo);
tmo 523 net/packet/af_packet.c unsigned int mbits = 0, msec = 0, div = 0, tmo = 0;
tmo 555 net/packet/af_packet.c tmo = mbits * msec;
tmo 558 net/packet/af_packet.c return tmo+1;
tmo 559 net/packet/af_packet.c return tmo;
tmo 326 net/xfrm/xfrm_policy.c time64_t tmo = xp->lft.hard_add_expires_seconds +
tmo 328 net/xfrm/xfrm_policy.c if (tmo <= 0)
tmo 330 net/xfrm/xfrm_policy.c if (tmo < next)
tmo 331 net/xfrm/xfrm_policy.c next = tmo;
tmo 334 net/xfrm/xfrm_policy.c time64_t tmo = xp->lft.hard_use_expires_seconds +
tmo 336 net/xfrm/xfrm_policy.c if (tmo <= 0)
tmo 338 net/xfrm/xfrm_policy.c if (tmo < next)
tmo 339 net/xfrm/xfrm_policy.c next = tmo;
tmo 342 net/xfrm/xfrm_policy.c time64_t tmo = xp->lft.soft_add_expires_seconds +
tmo 344 net/xfrm/xfrm_policy.c if (tmo <= 0) {
tmo 346 net/xfrm/xfrm_policy.c tmo = XFRM_KM_TIMEOUT;
tmo 348 net/xfrm/xfrm_policy.c if (tmo < next)
tmo 349 net/xfrm/xfrm_policy.c next = tmo;
tmo 352 net/xfrm/xfrm_policy.c time64_t tmo = xp->lft.soft_use_expires_seconds +
tmo 354 net/xfrm/xfrm_policy.c if (tmo <= 0) {
tmo 356 net/xfrm/xfrm_policy.c tmo = XFRM_KM_TIMEOUT;
tmo 358 net/xfrm/xfrm_policy.c if (tmo < next)
tmo 359 net/xfrm/xfrm_policy.c next = tmo;
tmo 536 net/xfrm/xfrm_state.c long tmo = x->lft.hard_add_expires_seconds +
tmo 538 net/xfrm/xfrm_state.c if (tmo <= 0) {
tmo 545 net/xfrm/xfrm_state.c tmo = x->lft.hard_add_expires_seconds - x->saved_tmo;
tmo 549 net/xfrm/xfrm_state.c if (tmo < next)
tmo 550 net/xfrm/xfrm_state.c next = tmo;
tmo 553 net/xfrm/xfrm_state.c long tmo = x->lft.hard_use_expires_seconds +
tmo 555 net/xfrm/xfrm_state.c if (tmo <= 0)
tmo 557 net/xfrm/xfrm_state.c if (tmo < next)
tmo 558 net/xfrm/xfrm_state.c next = tmo;
tmo 563 net/xfrm/xfrm_state.c long tmo = x->lft.soft_add_expires_seconds +
tmo 565 net/xfrm/xfrm_state.c if (tmo <= 0) {
tmo 568 net/xfrm/xfrm_state.c } else if (tmo < next) {
tmo 569 net/xfrm/xfrm_state.c next = tmo;
tmo 571 net/xfrm/xfrm_state.c x->saved_tmo = tmo;
tmo 575 net/xfrm/xfrm_state.c long tmo = x->lft.soft_use_expires_seconds +
tmo 577 net/xfrm/xfrm_state.c if (tmo <= 0)
tmo 579 net/xfrm/xfrm_state.c else if (tmo < next)
tmo 580 net/xfrm/xfrm_state.c next = tmo;
tmo 87 sound/soc/au1x/ac97c.c unsigned int tmo, retry;
tmo 95 sound/soc/au1x/ac97c.c tmo = 6;
tmo 96 sound/soc/au1x/ac97c.c while ((RD(ctx, AC97_STATUS) & STAT_CP) && --tmo)
tmo 98 sound/soc/au1x/ac97c.c if (!tmo) {
tmo 108 sound/soc/au1x/ac97c.c tmo = 0x10000;
tmo 109 sound/soc/au1x/ac97c.c while ((RD(ctx, AC97_STATUS) & STAT_CP) && --tmo)
tmo 113 sound/soc/au1x/ac97c.c if (!tmo)
tmo 118 sound/soc/au1x/ac97c.c } while (--retry && !tmo);
tmo 129 sound/soc/au1x/ac97c.c unsigned int tmo, retry;
tmo 135 sound/soc/au1x/ac97c.c for (tmo = 5; (RD(ctx, AC97_STATUS) & STAT_CP) && tmo; tmo--)
tmo 137 sound/soc/au1x/ac97c.c if (!tmo) {
tmo 144 sound/soc/au1x/ac97c.c for (tmo = 10; (RD(ctx, AC97_STATUS) & STAT_CP) && tmo; tmo--)
tmo 146 sound/soc/au1x/ac97c.c if (!tmo)
tmo 150 sound/soc/au1x/ac97c.c } while (--retry && !tmo);
tmo 75 sound/soc/au1x/psc-ac97.c unsigned short retry, tmo;
tmo 89 sound/soc/au1x/psc-ac97.c tmo = 20;
tmo 94 sound/soc/au1x/psc-ac97.c } while (--tmo);
tmo 104 sound/soc/au1x/psc-ac97.c tmo = 1; /* wrong register, try again */
tmo 106 sound/soc/au1x/psc-ac97.c } while (--retry && !tmo);
tmo 116 sound/soc/au1x/psc-ac97.c unsigned int tmo, retry;
tmo 129 sound/soc/au1x/psc-ac97.c tmo = 20;
tmo 134 sound/soc/au1x/psc-ac97.c } while (--tmo);
tmo 140 sound/soc/au1x/psc-ac97.c } while (--retry && !tmo);
tmo 146 sound/soc/au1x/psc-i2s.c unsigned long tmo;
tmo 152 sound/soc/au1x/psc-i2s.c tmo = 1000000;
tmo 153 sound/soc/au1x/psc-i2s.c while (!(__raw_readl(I2S_STAT(pscdata)) & PSC_I2SSTAT_SR) && tmo)
tmo 154 sound/soc/au1x/psc-i2s.c tmo--;
tmo 156 sound/soc/au1x/psc-i2s.c if (!tmo)
tmo 165 sound/soc/au1x/psc-i2s.c tmo = 1000000;
tmo 166 sound/soc/au1x/psc-i2s.c while (!(__raw_readl(I2S_STAT(pscdata)) & PSC_I2SSTAT_DR) && tmo)
tmo 167 sound/soc/au1x/psc-i2s.c tmo--;
tmo 169 sound/soc/au1x/psc-i2s.c if (tmo)
tmo 181 sound/soc/au1x/psc-i2s.c unsigned long tmo, stat;
tmo 200 sound/soc/au1x/psc-i2s.c tmo = 1000000;
tmo 201 sound/soc/au1x/psc-i2s.c while (!(__raw_readl(I2S_STAT(pscdata)) & I2SSTAT_BUSY(stype)) && tmo)
tmo 202 sound/soc/au1x/psc-i2s.c tmo--;
tmo 204 sound/soc/au1x/psc-i2s.c if (!tmo) {
tmo 215 sound/soc/au1x/psc-i2s.c unsigned long tmo, stat;
tmo 221 sound/soc/au1x/psc-i2s.c tmo = 1000000;
tmo 222 sound/soc/au1x/psc-i2s.c while ((__raw_readl(I2S_STAT(pscdata)) & I2SSTAT_BUSY(stype)) && tmo)
tmo 223 sound/soc/au1x/psc-i2s.c tmo--;
tmo 202 sound/soc/sh/hac.c unsigned int tmo;
tmo 207 sound/soc/sh/hac.c for (tmo = 1000; (tmo > 0) && !(HACREG(HACCR) & CR_CR); tmo--)
tmo 210 sound/soc/sh/hac.c if (!tmo)
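Most of the references above fall into two recurring kernel timeout idioms: compute an absolute deadline in jiffies (tmo = jiffies + msecs_to_jiffies(ms)) and poll it with time_after()/time_before(), or sleep on a completion with wait_for_completion_timeout() and treat a zero return as expiry. The sketch below is a hypothetical userspace analogue of the deadline-and-poll idiom, with CLOCK_MONOTONIC standing in for jiffies and a placeholder poll_ready() standing in for a device status read; it is illustrative only and is not taken from any of the files listed above.

/*
 * Userspace analogue of the kernel pattern seen in the listing:
 *     tmo = jiffies + msecs_to_jiffies(100);
 *     ...
 *     if (time_after(jiffies, tmo))
 *             -> timed out
 * poll_ready() is a hypothetical stand-in for a hardware status check.
 */
#include <stdbool.h>
#include <stdio.h>
#include <time.h>

static long long now_ms(void)
{
	struct timespec ts;

	clock_gettime(CLOCK_MONOTONIC, &ts);
	return ts.tv_sec * 1000LL + ts.tv_nsec / 1000000LL;
}

static bool poll_ready(void)
{
	return false;		/* placeholder: the "device" never becomes ready */
}

/* Wait up to tmo milliseconds for poll_ready(); 0 on success, -1 on timeout. */
static int wait_ready(unsigned int tmo)
{
	long long deadline = now_ms() + tmo;	/* like jiffies + msecs_to_jiffies(tmo) */

	while (!poll_ready()) {
		if (now_ms() > deadline)	/* like time_after(jiffies, tmo) */
			return -1;
	}
	return 0;
}

int main(void)
{
	if (wait_ready(100))
		fprintf(stderr, "timed out after 100 ms\n");
	return 0;
}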