Lines Matching refs:ha
323 struct qla_hw_data *ha; in qla2x00_start_scsi() local
329 ha = vha->hw; in qla2x00_start_scsi()
330 reg = &ha->iobase->isp; in qla2x00_start_scsi()
332 req = ha->req_q_map[0]; in qla2x00_start_scsi()
333 rsp = ha->rsp_q_map[0]; in qla2x00_start_scsi()
347 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_start_scsi()
363 nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd), in qla2x00_start_scsi()
373 req_cnt = ha->isp_ops->calc_req_entries(tot_dsds); in qla2x00_start_scsi()
375 cnt = RD_REG_WORD_RELAXED(ISP_REQ_Q_OUT(ha, reg)); in qla2x00_start_scsi()
401 SET_TARGET_ID(ha, cmd_pkt->target, sp->fcport->loop_id); in qla2x00_start_scsi()
410 ha->isp_ops->build_iocbs(sp, cmd_pkt, tot_dsds); in qla2x00_start_scsi()
427 WRT_REG_WORD(ISP_REQ_Q_IN(ha, reg), req->ring_index); in qla2x00_start_scsi()
428 RD_REG_WORD_RELAXED(ISP_REQ_Q_IN(ha, reg)); /* PCI Posting. */ in qla2x00_start_scsi()
435 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_start_scsi()
442 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_start_scsi()
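The qla2x00_start_scsi() hits above all follow one pattern: ha is derived from vha->hw, the default request and response queues come from ha->req_q_map[0] and ha->rsp_q_map[0], and every touch of the request ring happens between spin_lock_irqsave and spin_unlock_irqrestore on ha->hardware_lock. Below is a minimal standalone sketch of that pattern, using simplified stand-in types and a pthread mutex in place of the kernel spinlock; it illustrates the listed calls and is not the real qla2xxx code.

	#include <pthread.h>
	#include <stdio.h>

	struct req_que { int ring_index; };
	struct rsp_que { int id; };

	struct qla_hw_data {                            /* stand-in, NOT the kernel struct */
		pthread_mutex_t hardware_lock;          /* stand-in for ha->hardware_lock  */
		struct req_que *req_q_map[1];
		struct rsp_que *rsp_q_map[1];
	};

	struct scsi_qla_host { struct qla_hw_data *hw; };

	static int start_scsi_sketch(struct scsi_qla_host *vha)
	{
		struct qla_hw_data *ha = vha->hw;           /* ha = vha->hw            */
		struct req_que *req = ha->req_q_map[0];     /* default request queue   */
		struct rsp_que *rsp = ha->rsp_q_map[0];     /* default response queue  */

		(void)rsp;
		pthread_mutex_lock(&ha->hardware_lock);     /* spin_lock_irqsave(...)  */
		req->ring_index++;                          /* build and queue IOCBs   */
		pthread_mutex_unlock(&ha->hardware_lock);   /* spin_unlock_irqrestore  */
		return 0;
	}

	int main(void)
	{
		struct req_que req = { 0 };
		struct rsp_que rsp = { 0 };
		struct qla_hw_data ha = { .hardware_lock = PTHREAD_MUTEX_INITIALIZER,
					  .req_q_map = { &req }, .rsp_q_map = { &rsp } };
		struct scsi_qla_host vha = { .hw = &ha };

		start_scsi_sketch(&vha);
		printf("ring_index = %d\n", req.ring_index);
		return 0;
	}
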
453 struct qla_hw_data *ha = vha->hw; in qla2x00_start_iocbs() local
454 device_reg_t *reg = ISP_QUE_REG(ha, req->id); in qla2x00_start_iocbs()
456 if (IS_P3P_TYPE(ha)) { in qla2x00_start_iocbs()
468 if (ha->mqenable || IS_QLA83XX(ha) || IS_QLA27XX(ha)) { in qla2x00_start_iocbs()
470 RD_REG_DWORD_RELAXED(&ha->iobase->isp24.hccr); in qla2x00_start_iocbs()
471 } else if (IS_QLAFX00(ha)) { in qla2x00_start_iocbs()
474 QLAFX00_SET_HST_INTR(ha, ha->rqstq_intr_code); in qla2x00_start_iocbs()
475 } else if (IS_FWI2_CAPABLE(ha)) { in qla2x00_start_iocbs()
479 WRT_REG_WORD(ISP_REQ_Q_IN(ha, &reg->isp), in qla2x00_start_iocbs()
481 RD_REG_WORD_RELAXED(ISP_REQ_Q_IN(ha, &reg->isp)); in qla2x00_start_iocbs()
505 struct qla_hw_data *ha = vha->hw; in __qla2x00_marker() local
506 scsi_qla_host_t *base_vha = pci_get_drvdata(ha->pdev); in __qla2x00_marker()
508 req = ha->req_q_map[0]; in __qla2x00_marker()
520 if (IS_FWI2_CAPABLE(ha)) { in __qla2x00_marker()
528 SET_TARGET_ID(ha, mrk->target, loop_id); in __qla2x00_marker()
583 struct qla_hw_data *ha; in qla24xx_build_scsi_type_6_iocbs() local
606 ha = vha->hw; in qla24xx_build_scsi_type_6_iocbs()
628 dsd_ptr = list_first_entry(&ha->gbl_dsd_list, in qla24xx_build_scsi_type_6_iocbs()
632 ha->gbl_dsd_avail--; in qla24xx_build_scsi_type_6_iocbs()
635 ha->gbl_dsd_inuse++; in qla24xx_build_scsi_type_6_iocbs()
908 qla24xx_walk_and_build_sglist_no_difb(struct qla_hw_data *ha, srb_t *sp, in qla24xx_walk_and_build_sglist_no_difb() argument
965 dma_pool_alloc(ha->dl_dma_pool, GFP_ATOMIC, in qla24xx_walk_and_build_sglist_no_difb()
1024 qla24xx_walk_and_build_sglist(struct qla_hw_data *ha, srb_t *sp, uint32_t *dsd, in qla24xx_walk_and_build_sglist() argument
1065 dma_pool_alloc(ha->dl_dma_pool, GFP_ATOMIC, in qla24xx_walk_and_build_sglist()
1111 qla24xx_walk_and_build_prot_sglist(struct qla_hw_data *ha, srb_t *sp, in qla24xx_walk_and_build_prot_sglist() argument
1157 dma_pool_alloc(ha->dl_dma_pool, GFP_ATOMIC, in qla24xx_walk_and_build_prot_sglist()
1224 struct qla_hw_data *ha; in qla24xx_build_scsi_crc_2_iocbs() local
1236 ha = vha->hw; in qla24xx_build_scsi_crc_2_iocbs()
1264 dma_pool_alloc(ha->dl_dma_pool, GFP_ATOMIC, &crc_ctx_dma); in qla24xx_build_scsi_crc_2_iocbs()
1347 else if (IS_PI_UNINIT_CAPABLE(ha)) { in qla24xx_build_scsi_crc_2_iocbs()
1391 if (qla24xx_walk_and_build_sglist_no_difb(ha, sp, in qla24xx_build_scsi_crc_2_iocbs()
1394 } else if (qla24xx_walk_and_build_sglist(ha, sp, cur_dsd, in qla24xx_build_scsi_crc_2_iocbs()
1402 if (qla24xx_walk_and_build_prot_sglist(ha, sp, cur_dsd, in qla24xx_build_scsi_crc_2_iocbs()
1436 struct qla_hw_data *ha = vha->hw; in qla24xx_start_scsi() local
1454 spin_lock_irqsave(&ha->hardware_lock, flags); in qla24xx_start_scsi()
1470 nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd), in qla24xx_start_scsi()
1480 cnt = IS_SHADOW_REG_CAPABLE(ha) ? *req->out_ptr : in qla24xx_start_scsi()
1545 RD_REG_DWORD_RELAXED(&ha->iobase->isp24.hccr); in qla24xx_start_scsi()
1552 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_start_scsi()
1559 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_start_scsi()
1587 struct qla_hw_data *ha = vha->hw; in qla24xx_dif_start_scsi() local
1616 spin_lock_irqsave(&ha->hardware_lock, flags); in qla24xx_dif_start_scsi()
1634 nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd), in qla24xx_dif_start_scsi()
1664 nseg = dma_map_sg(&ha->pdev->dev, scsi_prot_sglist(cmd), in qla24xx_dif_start_scsi()
1684 cnt = IS_SHADOW_REG_CAPABLE(ha) ? *req->out_ptr : in qla24xx_dif_start_scsi()
1745 RD_REG_DWORD_RELAXED(&ha->iobase->isp24.hccr); in qla24xx_dif_start_scsi()
1752 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_dif_start_scsi()
1763 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla24xx_dif_start_scsi()
1771 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla25xx_set_que() local
1774 if (ha->flags.cpu_affinity_enabled && affinity >= 0 && in qla25xx_set_que()
1775 affinity < ha->max_rsp_queues - 1) in qla25xx_set_que()
1776 *rsp = ha->rsp_q_map[affinity + 1]; in qla25xx_set_que()
1778 *rsp = ha->rsp_q_map[0]; in qla25xx_set_que()
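The qla25xx_set_que() lines above contain the complete response-queue selection logic: use the CPU-affinity queue when affinity routing is enabled and the index is in range, otherwise fall back to queue 0. A standalone sketch of that check follows, with simplified stand-in structures rather than the real qla2xxx definitions.

	#include <stdio.h>

	#define MAX_RSP_QUEUES 4

	struct rsp_que { int id; };

	struct qla_hw_data {                            /* stand-in, NOT the kernel struct */
		int cpu_affinity_enabled;
		int max_rsp_queues;
		struct rsp_que *rsp_q_map[MAX_RSP_QUEUES];
	};

	static struct rsp_que *pick_rsp_queue(struct qla_hw_data *ha, int affinity)
	{
		/* Mirrors the bounds check in the listing: affinity indexes the
		 * queues after queue 0, so it must stay below max_rsp_queues - 1. */
		if (ha->cpu_affinity_enabled && affinity >= 0 &&
		    affinity < ha->max_rsp_queues - 1)
			return ha->rsp_q_map[affinity + 1];
		return ha->rsp_q_map[0];
	}

	int main(void)
	{
		struct rsp_que queues[MAX_RSP_QUEUES] = { {0}, {1}, {2}, {3} };
		struct qla_hw_data ha = { .cpu_affinity_enabled = 1, .max_rsp_queues = 4 };
		int i;

		for (i = 0; i < MAX_RSP_QUEUES; i++)
			ha.rsp_q_map[i] = &queues[i];

		printf("affinity 2 -> queue %d\n", pick_rsp_queue(&ha, 2)->id); /* queue 3 */
		printf("affinity 9 -> queue %d\n", pick_rsp_queue(&ha, 9)->id); /* queue 0 */
		return 0;
	}
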
1796 struct qla_hw_data *ha = vha->hw; in qla2x00_alloc_iocbs() local
1797 struct req_que *req = ha->req_q_map[0]; in qla2x00_alloc_iocbs()
1798 device_reg_t *reg = ISP_QUE_REG(ha, req->id); in qla2x00_alloc_iocbs()
1837 if (ha->mqenable || IS_QLA83XX(ha) || IS_QLA27XX(ha)) in qla2x00_alloc_iocbs()
1839 else if (IS_P3P_TYPE(ha)) in qla2x00_alloc_iocbs()
1841 else if (IS_FWI2_CAPABLE(ha)) in qla2x00_alloc_iocbs()
1843 else if (IS_QLAFX00(ha)) in qla2x00_alloc_iocbs()
1847 ISP_REQ_Q_OUT(ha, &reg->isp)); in qla2x00_alloc_iocbs()
1862 if (IS_QLAFX00(ha)) { in qla2x00_alloc_iocbs()
1895 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla2x00_login_iocb() local
1900 SET_TARGET_ID(ha, mbx->loop_id, sp->fcport->loop_id); in qla2x00_login_iocb()
1904 if (HAS_EXTENDED_IDS(ha)) { in qla2x00_login_iocb()
1935 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla2x00_logout_iocb() local
1938 SET_TARGET_ID(ha, mbx->loop_id, sp->fcport->loop_id); in qla2x00_logout_iocb()
1940 mbx->mb1 = HAS_EXTENDED_IDS(ha) ? in qla2x00_logout_iocb()
1962 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla2x00_adisc_iocb() local
1965 SET_TARGET_ID(ha, mbx->loop_id, sp->fcport->loop_id); in qla2x00_adisc_iocb()
1967 if (HAS_EXTENDED_IDS(ha)) { in qla2x00_adisc_iocb()
1973 mbx->mb2 = cpu_to_le16(MSW(ha->async_pd_dma)); in qla2x00_adisc_iocb()
1974 mbx->mb3 = cpu_to_le16(LSW(ha->async_pd_dma)); in qla2x00_adisc_iocb()
1975 mbx->mb6 = cpu_to_le16(MSW(MSD(ha->async_pd_dma))); in qla2x00_adisc_iocb()
1976 mbx->mb7 = cpu_to_le16(LSW(MSD(ha->async_pd_dma))); in qla2x00_adisc_iocb()
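In the qla2x00_adisc_iocb() lines above, the 64-bit ha->async_pd_dma address is spread across four 16-bit mailbox registers (mb2/mb3/mb6/mb7). The standalone program below reproduces that split; the LSW/MSW/MSD macros here are local stand-ins mirroring the usual qla2xxx helpers, and the DMA value is hypothetical.

	#include <stdint.h>
	#include <stdio.h>

	#define LSW(x)  ((uint16_t)(x))                     /* low 16 bits of low dword  */
	#define MSW(x)  ((uint16_t)((uint32_t)(x) >> 16))   /* high 16 bits of low dword */
	#define MSD(x)  ((uint32_t)(((uint64_t)(x)) >> 32)) /* high 32 bits              */

	int main(void)
	{
		uint64_t dma = 0x123456789abcdef0ULL;   /* hypothetical DMA address */

		/* Same ordering as mb2/mb3/mb6/mb7 in the listing. */
		printf("mb2 = 0x%04x\n", MSW(dma));       /* bits 31..16 */
		printf("mb3 = 0x%04x\n", LSW(dma));       /* bits 15..0  */
		printf("mb6 = 0x%04x\n", MSW(MSD(dma)));  /* bits 63..48 */
		printf("mb7 = 0x%04x\n", LSW(MSD(dma)));  /* bits 47..32 */
		return 0;
	}
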
1987 struct qla_hw_data *ha = vha->hw; in qla24xx_tm_iocb() local
1998 tsk->timeout = cpu_to_le16(ha->r_a_tov / 10 * 2); in qla24xx_tm_iocb()
2067 struct qla_hw_data *ha = vha->hw; in qla2x00_ct_iocb() local
2076 SET_TARGET_ID(ha, ct_iocb->loop_id, sp->fcport->loop_id); in qla2x00_ct_iocb()
2144 struct qla_hw_data *ha = vha->hw; in qla24xx_ct_iocb() local
2190 ha->req_q_map[0]); in qla24xx_ct_iocb()
2230 struct qla_hw_data *ha = vha->hw; in qla82xx_start_scsi() local
2235 reg = &ha->iobase->isp82; in qla82xx_start_scsi()
2238 rsp = ha->rsp_q_map[0]; in qla82xx_start_scsi()
2243 dbval = 0x04 | (ha->portnum << 5); in qla82xx_start_scsi()
2257 spin_lock_irqsave(&ha->hardware_lock, flags); in qla82xx_start_scsi()
2273 nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd), in qla82xx_start_scsi()
2289 if ((more_dsd_lists + ha->gbl_dsd_inuse) >= NUM_DSD_CHAIN) { in qla82xx_start_scsi()
2292 more_dsd_lists + ha->gbl_dsd_inuse, NUM_DSD_CHAIN, in qla82xx_start_scsi()
2297 if (more_dsd_lists <= ha->gbl_dsd_avail) in qla82xx_start_scsi()
2300 more_dsd_lists -= ha->gbl_dsd_avail; in qla82xx_start_scsi()
2311 dsd_ptr->dsd_addr = dma_pool_alloc(ha->dl_dma_pool, in qla82xx_start_scsi()
2320 list_add_tail(&dsd_ptr->list, &ha->gbl_dsd_list); in qla82xx_start_scsi()
2321 ha->gbl_dsd_avail++; in qla82xx_start_scsi()
2340 mempool_alloc(ha->ctx_mempool, GFP_ATOMIC); in qla82xx_start_scsi()
2348 ctx->fcp_cmnd = dma_pool_alloc(ha->fcp_cmnd_dma_pool, in qla82xx_start_scsi()
2411 if (ha->flags.fcp_prio_enabled) in qla82xx_start_scsi()
2471 if (ha->flags.fcp_prio_enabled) in qla82xx_start_scsi()
2513 qla82xx_wr_32(ha, (uintptr_t __force)ha->nxdb_wr_ptr, dbval); in qla82xx_start_scsi()
2515 WRT_REG_DWORD(ha->nxdb_wr_ptr, dbval); in qla82xx_start_scsi()
2517 while (RD_REG_DWORD(ha->nxdb_rd_ptr) != dbval) { in qla82xx_start_scsi()
2518 WRT_REG_DWORD(ha->nxdb_wr_ptr, dbval); in qla82xx_start_scsi()
2528 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla82xx_start_scsi()
2532 dma_pool_free(ha->fcp_cmnd_dma_pool, ctx->fcp_cmnd, ctx->fcp_cmnd_dma); in qla82xx_start_scsi()
2538 mempool_free(sp->u.scmd.ctx, ha->ctx_mempool); in qla82xx_start_scsi()
2541 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla82xx_start_scsi()
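The qla82xx_start_scsi() hits show the 82xx doorbell handshake: compose dbval from ha->portnum, write it to ha->nxdb_wr_ptr, and keep rewriting until ha->nxdb_rd_ptr reads back the same value. The sketch below models that loop with plain variables standing in for the mapped registers; the "device" here simply acks on the second write, so this is only an illustration of the listed control flow, not driver code.

	#include <stdint.h>
	#include <stdio.h>

	static volatile uint32_t nxdb_wr_reg;   /* stand-in for *ha->nxdb_wr_ptr */
	static volatile uint32_t nxdb_rd_reg;   /* stand-in for *ha->nxdb_rd_ptr */
	static int writes;

	/* Fake device: it latches the doorbell value only on the second write. */
	static void write_doorbell(uint32_t val)
	{
		nxdb_wr_reg = val;
		if (++writes >= 2)
			nxdb_rd_reg = val;
	}

	int main(void)
	{
		unsigned int portnum = 1;                  /* hypothetical port number */
		uint32_t dbval = 0x04 | (portnum << 5);    /* as in the listing above  */

		write_doorbell(dbval);                     /* WRT_REG_DWORD(nxdb_wr_ptr) */
		while (nxdb_rd_reg != dbval)               /* RD_REG_DWORD(nxdb_rd_ptr)  */
			write_doorbell(dbval);             /* retry until acknowledged   */

		printf("doorbell acknowledged: 0x%02x\n", (unsigned int)dbval);
		return 0;
	}
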
2573 struct qla_hw_data *ha = sp->fcport->vha->hw; in qla2x00_start_sp() local
2578 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_start_sp()
2589 IS_FWI2_CAPABLE(ha) ? in qla2x00_start_sp()
2594 IS_FWI2_CAPABLE(ha) ? in qla2x00_start_sp()
2603 IS_FWI2_CAPABLE(ha) ? in qla2x00_start_sp()
2608 IS_FWI2_CAPABLE(ha) ? in qla2x00_start_sp()
2613 IS_QLAFX00(ha) ? in qla2x00_start_sp()
2622 IS_QLAFX00(ha) ? in qla2x00_start_sp()
2631 qla2x00_start_iocbs(sp->fcport->vha, ha->req_q_map[0]); in qla2x00_start_sp()
2633 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_start_sp()
2737 struct qla_hw_data *ha = vha->hw; in qla2x00_start_bidir() local
2751 rsp = ha->rsp_q_map[0]; in qla2x00_start_bidir()
2763 spin_lock_irqsave(&ha->hardware_lock, flags); in qla2x00_start_bidir()
2785 cnt = IS_SHADOW_REG_CAPABLE(ha) ? *req->out_ptr : in qla2x00_start_bidir()
2824 spin_unlock_irqrestore(&ha->hardware_lock, flags); in qla2x00_start_bidir()