Lines Matching refs:snic

39 	struct snic *snic = svnic_dev_priv(wq->vdev);  in snic_wq_cmpl_frame_send()  local
44 SNIC_HOST_INFO(snic->shost, in snic_wq_cmpl_frame_send()
48 SNIC_TRC(snic->shost->host_no, 0, 0, in snic_wq_cmpl_frame_send()
51 pci_unmap_single(snic->pdev, buf->dma_addr, buf->len, PCI_DMA_TODEVICE); in snic_wq_cmpl_frame_send()
63 struct snic *snic = svnic_dev_priv(vdev); in snic_wq_cmpl_handler_cont() local
68 spin_lock_irqsave(&snic->wq_lock[q_num], flags); in snic_wq_cmpl_handler_cont()
69 svnic_wq_service(&snic->wq[q_num], in snic_wq_cmpl_handler_cont()
74 spin_unlock_irqrestore(&snic->wq_lock[q_num], flags); in snic_wq_cmpl_handler_cont()
80 snic_wq_cmpl_handler(struct snic *snic, int work_to_do) in snic_wq_cmpl_handler() argument
85 snic->s_stats.misc.last_ack_time = jiffies; in snic_wq_cmpl_handler()
86 for (i = 0; i < snic->wq_count; i++) { in snic_wq_cmpl_handler()
87 work_done += svnic_cq_service(&snic->cq[i], in snic_wq_cmpl_handler()
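
The first three groups above (snic_wq_cmpl_frame_send, snic_wq_cmpl_handler_cont, snic_wq_cmpl_handler) form the send-completion path: the handler stamps last_ack_time, loops over every work queue, and services each queue under its wq_lock, with the per-frame callback unmapping the finished DMA buffer. Below is a minimal sketch of that pattern under placeholder types (struct my_tx_ctx, struct my_wq_buf, the done[] lists are mine, not the driver's), keeping the legacy pci_*_single DMA calls the listing uses; the driver's own vNIC helpers (svnic_wq_service, svnic_cq_service) are not reproduced.

#include <linux/jiffies.h>
#include <linux/list.h>
#include <linux/pci.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

#define MY_WQ_MAX	8			/* illustrative queue count */

/* Placeholder buffer/adapter types; not the driver's structs. */
struct my_wq_buf {
	struct list_head	list;
	dma_addr_t		dma_addr;
	size_t			len;
};

struct my_tx_ctx {
	struct pci_dev		*pdev;
	unsigned int		wq_count;
	spinlock_t		wq_lock[MY_WQ_MAX];
	struct list_head	done[MY_WQ_MAX];	/* buffers the HW finished */
	unsigned long		last_ack_time;
};

/* Mirror of the completion flow above: stamp the ack time, then, per
 * queue and under that queue's lock, unmap every buffer the hardware
 * has finished sending.
 */
static void handle_wq_completions(struct my_tx_ctx *ctx)
{
	struct my_wq_buf *buf, *tmp;
	unsigned long flags;
	unsigned int i;

	ctx->last_ack_time = jiffies;

	for (i = 0; i < ctx->wq_count; i++) {
		spin_lock_irqsave(&ctx->wq_lock[i], flags);
		list_for_each_entry_safe(buf, tmp, &ctx->done[i], list) {
			list_del(&buf->list);
			pci_unmap_single(ctx->pdev, buf->dma_addr, buf->len,
					 PCI_DMA_TODEVICE);
			kfree(buf);	/* the driver hands the slot back to the vNIC ring */
		}
		spin_unlock_irqrestore(&ctx->wq_lock[i], flags);
	}
}
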
101 struct snic *snic = svnic_dev_priv(wq->vdev); in snic_free_wq_buf() local
105 pci_unmap_single(snic->pdev, buf->dma_addr, buf->len, PCI_DMA_TODEVICE); in snic_free_wq_buf()
108 spin_lock_irqsave(&snic->spl_cmd_lock, flags); in snic_free_wq_buf()
110 spin_unlock_irqrestore(&snic->spl_cmd_lock, flags); in snic_free_wq_buf()
116 spin_unlock_irqrestore(&snic->spl_cmd_lock, flags); in snic_free_wq_buf()
119 snic_pci_unmap_rsp_buf(snic, rqi); in snic_free_wq_buf()
123 snic_req_free(snic, rqi); in snic_free_wq_buf()
124 SNIC_HOST_INFO(snic->shost, "snic_free_wq_buf .. freed.\n"); in snic_free_wq_buf()
132 snic_select_wq(struct snic *snic) in snic_select_wq() argument
141 snic_queue_wq_desc(struct snic *snic, void *os_buf, u16 len) in snic_queue_wq_desc() argument
145 struct snic_fw_stats *fwstats = &snic->s_stats.fw; in snic_queue_wq_desc()
152 pa = pci_map_single(snic->pdev, os_buf, len, PCI_DMA_TODEVICE); in snic_queue_wq_desc()
153 if (pci_dma_mapping_error(snic->pdev, pa)) { in snic_queue_wq_desc()
154 SNIC_HOST_ERR(snic->shost, "qdesc: PCI DMA Mapping Fail.\n"); in snic_queue_wq_desc()
159 q_num = snic_select_wq(snic); in snic_queue_wq_desc()
161 spin_lock_irqsave(&snic->wq_lock[q_num], flags); in snic_queue_wq_desc()
162 if (!svnic_wq_desc_avail(snic->wq)) { in snic_queue_wq_desc()
163 pci_unmap_single(snic->pdev, pa, len, PCI_DMA_TODEVICE); in snic_queue_wq_desc()
164 spin_unlock_irqrestore(&snic->wq_lock[q_num], flags); in snic_queue_wq_desc()
165 atomic64_inc(&snic->s_stats.misc.wq_alloc_fail); in snic_queue_wq_desc()
166 SNIC_DBG("host = %d, WQ is Full\n", snic->shost->host_no); in snic_queue_wq_desc()
171 snic_queue_wq_eth_desc(&snic->wq[q_num], os_buf, pa, len, 0, 0, 1); in snic_queue_wq_desc()
172 spin_unlock_irqrestore(&snic->wq_lock[q_num], flags); in snic_queue_wq_desc()
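
snic_queue_wq_desc() above is the transmit posting path: DMA-map the buffer, pick a work queue, and under that queue's lock confirm a descriptor slot is free before posting; if the queue is full it unmaps, drops the lock, and counts a wq_alloc_fail. The sketch below shows the same check-then-post discipline using only core kernel calls; struct my_txq and its desc_avail counter are placeholders standing in for the driver's svnic_wq helpers.

#include <linux/errno.h>
#include <linux/pci.h>
#include <linux/spinlock.h>
#include <linux/types.h>

/* Illustrative queue state; stands in for the driver's vNIC WQ. */
struct my_txq {
	spinlock_t	lock;
	struct pci_dev	*pdev;
	unsigned int	desc_avail;	/* free descriptor slots */
};

/* Map, then post one buffer (mirrors snic_queue_wq_desc()). */
static int post_wq_desc(struct my_txq *wq, void *buf, u16 len)
{
	unsigned long flags;
	dma_addr_t pa;

	pa = pci_map_single(wq->pdev, buf, len, PCI_DMA_TODEVICE);
	if (pci_dma_mapping_error(wq->pdev, pa))
		return -ENOMEM;

	spin_lock_irqsave(&wq->lock, flags);
	if (!wq->desc_avail) {
		/* Queue full: undo the mapping before bailing out. */
		pci_unmap_single(wq->pdev, pa, len, PCI_DMA_TODEVICE);
		spin_unlock_irqrestore(&wq->lock, flags);
		return -ENOMEM;
	}
	wq->desc_avail--;	/* the driver calls snic_queue_wq_eth_desc() here */
	spin_unlock_irqrestore(&wq->lock, flags);

	return 0;
}

Unmapping on the full-queue exit before returning keeps the error path from leaking the streaming DMA mapping, matching the unmap-then-unlock order shown at 163-165 above.
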
187 snic_handle_untagged_req(struct snic *snic, struct snic_req_info *rqi) in snic_handle_untagged_req() argument
193 spin_lock_irqsave(&snic->spl_cmd_lock, flags); in snic_handle_untagged_req()
194 list_add_tail(&rqi->list, &snic->spl_cmd_list); in snic_handle_untagged_req()
195 spin_unlock_irqrestore(&snic->spl_cmd_lock, flags); in snic_handle_untagged_req()
203 snic_req_init(struct snic *snic, int sg_cnt) in snic_req_init() argument
211 rqi = mempool_alloc(snic->req_pool[typ], GFP_ATOMIC); in snic_req_init()
213 atomic64_inc(&snic->s_stats.io.alloc_fail); in snic_req_init()
214 SNIC_HOST_ERR(snic->shost, in snic_req_init()
225 rqi->snic = snic; in snic_req_init()
234 if (sg_cnt > atomic64_read(&snic->s_stats.io.max_sgl)) in snic_req_init()
235 atomic64_set(&snic->s_stats.io.max_sgl, sg_cnt); in snic_req_init()
238 atomic64_inc(&snic->s_stats.io.sgl_cnt[sg_cnt - 1]); in snic_req_init()
246 SNIC_SCSI_DBG(snic->shost, "Req_alloc:rqi = %p allocatd.\n", rqi); in snic_req_init()
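
snic_req_init() above allocates a request from a per-type mempool with GFP_ATOMIC, bumps alloc_fail on failure, and maintains max_sgl / sgl_cnt statistics. A short sketch of that allocate-and-account pattern follows, assuming a placeholder struct io_stats rather than the driver's snic_stats layout.

#include <linux/atomic.h>
#include <linux/gfp.h>
#include <linux/mempool.h>

/* Placeholder statistics block (the driver keeps these in snic->s_stats.io). */
struct io_stats {
	atomic64_t	alloc_fail;
	atomic64_t	max_sgl;
};

/* Allocate one request descriptor in atomic context, as the I/O
 * submission path does, and track the largest SGL seen so far.
 */
static void *alloc_req(mempool_t *pool, struct io_stats *st, int sg_cnt)
{
	void *rqi = mempool_alloc(pool, GFP_ATOMIC);

	if (!rqi) {
		atomic64_inc(&st->alloc_fail);
		return NULL;
	}

	if (sg_cnt > atomic64_read(&st->max_sgl))
		atomic64_set(&st->max_sgl, sg_cnt);

	return rqi;
}
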
255 snic_abort_req_init(struct snic *snic, struct snic_req_info *rqi) in snic_abort_req_init() argument
266 req = mempool_alloc(snic->req_pool[SNIC_REQ_TM_CACHE], GFP_ATOMIC); in snic_abort_req_init()
268 SNIC_HOST_ERR(snic->shost, "abts:Failed to alloc tm req.\n"); in snic_abort_req_init()
286 snic_dr_req_init(struct snic *snic, struct snic_req_info *rqi) in snic_dr_req_init() argument
292 req = mempool_alloc(snic->req_pool[SNIC_REQ_TM_CACHE], GFP_ATOMIC); in snic_dr_req_init()
294 SNIC_HOST_ERR(snic->shost, "dr:Failed to alloc tm req.\n"); in snic_dr_req_init()
311 snic_req_free(struct snic *snic, struct snic_req_info *rqi) in snic_req_free() argument
317 SNIC_SCSI_DBG(snic->shost, in snic_req_free()
322 mempool_free(rqi->abort_req, snic->req_pool[SNIC_REQ_TM_CACHE]); in snic_req_free()
325 mempool_free(rqi->dr_req, snic->req_pool[SNIC_REQ_TM_CACHE]); in snic_req_free()
327 mempool_free(rqi, snic->req_pool[rqi->rq_pool_type]); in snic_req_free()
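
snic_req_free() above returns any attached abort or device-reset sub-request to the task-management cache pool before handing the request itself back to whichever pool it came from. A compact sketch of that nested cleanup, with placeholder types and field names:

#include <linux/mempool.h>

struct my_io_req {			/* placeholder for snic_req_info */
	void		*abort_req;
	void		*dr_req;
	mempool_t	*owner_pool;	/* pool this request was allocated from */
};

/* Release sub-requests first, then the request itself. */
static void free_io_req(struct my_io_req *rqi, mempool_t *tm_pool)
{
	if (rqi->abort_req)
		mempool_free(rqi->abort_req, tm_pool);
	if (rqi->dr_req)
		mempool_free(rqi->dr_req, tm_pool);
	mempool_free(rqi, rqi->owner_pool);
}
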
331 snic_pci_unmap_rsp_buf(struct snic *snic, struct snic_req_info *rqi) in snic_pci_unmap_rsp_buf() argument
337 pci_unmap_single(snic->pdev, in snic_pci_unmap_rsp_buf()
347 snic_free_all_untagged_reqs(struct snic *snic) in snic_free_all_untagged_reqs() argument
353 spin_lock_irqsave(&snic->spl_cmd_lock, flags); in snic_free_all_untagged_reqs()
354 list_for_each_safe(cur, nxt, &snic->spl_cmd_list) { in snic_free_all_untagged_reqs()
358 snic_pci_unmap_rsp_buf(snic, rqi); in snic_free_all_untagged_reqs()
363 snic_req_free(snic, rqi); in snic_free_all_untagged_reqs()
365 spin_unlock_irqrestore(&snic->spl_cmd_lock, flags); in snic_free_all_untagged_reqs()
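
snic_free_all_untagged_reqs() above walks the special-command list under spl_cmd_lock with the _safe list iterator, since each entry is unlinked and freed while iterating. A sketch of that teardown pattern, with an illustrative entry type and kfree() standing in for the driver's unmap-and-mempool release:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

struct untagged_req {			/* placeholder for snic_req_info */
	struct list_head list;
};

/* Drain and free every pending untagged request under the list lock. */
static void free_all_untagged(spinlock_t *lock, struct list_head *head)
{
	struct untagged_req *rqi, *tmp;
	unsigned long flags;

	spin_lock_irqsave(lock, flags);
	list_for_each_entry_safe(rqi, tmp, head, list) {
		list_del(&rqi->list);
		kfree(rqi);	/* the driver unmaps its rsp buffer and uses mempool_free() */
	}
	spin_unlock_irqrestore(lock, flags);
}
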
372 snic_release_untagged_req(struct snic *snic, struct snic_req_info *rqi) in snic_release_untagged_req() argument
376 spin_lock_irqsave(&snic->snic_lock, flags); in snic_release_untagged_req()
377 if (snic->in_remove) { in snic_release_untagged_req()
378 spin_unlock_irqrestore(&snic->snic_lock, flags); in snic_release_untagged_req()
381 spin_unlock_irqrestore(&snic->snic_lock, flags); in snic_release_untagged_req()
383 spin_lock_irqsave(&snic->spl_cmd_lock, flags); in snic_release_untagged_req()
385 spin_unlock_irqrestore(&snic->spl_cmd_lock, flags); in snic_release_untagged_req()
389 spin_unlock_irqrestore(&snic->spl_cmd_lock, flags); in snic_release_untagged_req()
390 snic_req_free(snic, rqi); in snic_release_untagged_req()
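
snic_release_untagged_req() above first checks the adapter's in_remove flag under snic_lock, bailing out if teardown is in progress, and only then unlinks the request from the special-command list under spl_cmd_lock and frees it. A hedged sketch of that two-lock release; the types and field names are placeholders, not the driver's:

#include <linux/list.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/types.h>

struct my_adapter {			/* illustrative only */
	spinlock_t		adap_lock;	/* stands in for snic->snic_lock */
	bool			in_remove;
	spinlock_t		spl_cmd_lock;
	struct list_head	spl_cmd_list;
};

struct my_spl_req {
	struct list_head list;
};

static void release_untagged_req(struct my_adapter *ad, struct my_spl_req *rqi)
{
	unsigned long flags;

	spin_lock_irqsave(&ad->adap_lock, flags);
	if (ad->in_remove) {
		/* Teardown owns the list now; let it free the request. */
		spin_unlock_irqrestore(&ad->adap_lock, flags);
		return;
	}
	spin_unlock_irqrestore(&ad->adap_lock, flags);

	spin_lock_irqsave(&ad->spl_cmd_lock, flags);
	list_del_init(&rqi->list);
	spin_unlock_irqrestore(&ad->spl_cmd_lock, flags);

	kfree(rqi);			/* the driver returns it to a mempool */
}
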
510 snic_calc_io_process_time(struct snic *snic, struct snic_req_info *rqi) in snic_calc_io_process_time() argument
516 if (duration > atomic64_read(&snic->s_stats.io.max_time)) in snic_calc_io_process_time()
517 atomic64_set(&snic->s_stats.io.max_time, duration); in snic_calc_io_process_time()
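
snic_calc_io_process_time() above ratchets a per-host maximum I/O latency: read the current maximum and store the new duration only if it is larger. The read/compare/set sequence is not atomic as a whole, so concurrent completions can race, which is acceptable for a best-effort statistic. A one-function sketch:

#include <linux/atomic.h>
#include <linux/types.h>

/* Ratchet a best-effort "worst observed" latency statistic upward. */
static void update_max_io_time(atomic64_t *max_time, u64 duration)
{
	if (duration > atomic64_read(max_time))
		atomic64_set(max_time, duration);
}
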