Lines matching refs:card
(Each entry shows the source line number, the matching line, and the enclosing function; "argument" marks lines where card appears as a function parameter.)
102 static unsigned int rsxx_addr8_to_laddr(u64 addr8, struct rsxx_cardinfo *card) in rsxx_addr8_to_laddr() argument
106 tgt_addr8 = ((addr8 >> card->_stripe.upper_shift) & in rsxx_addr8_to_laddr()
107 card->_stripe.upper_mask) | in rsxx_addr8_to_laddr()
108 ((addr8) & card->_stripe.lower_mask); in rsxx_addr8_to_laddr()
113 static unsigned int rsxx_get_dma_tgt(struct rsxx_cardinfo *card, u64 addr8) in rsxx_get_dma_tgt() argument
117 tgt = (addr8 >> card->_stripe.target_shift) & card->_stripe.target_mask; in rsxx_get_dma_tgt()
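Read together with the _stripe fields that rsxx_dma_stripe_setup() fills in further down, these two helpers decode a byte address: the bits below target_shift are the offset within a stripe, the next bits select which of the n_targets DMA channels owns the data, and the remaining upper bits are shifted down to sit directly above the stripe offset, giving the address within that channel. A minimal userspace sketch of the arithmetic, using hypothetical geometry (4 KiB stripes across 8 targets) and illustrative names; it models only the masking shown here, not the rest of the driver:

    #include <stdint.h>
    #include <stdio.h>

    /* Mirrors the _stripe fields referenced above, with the values
     * rsxx_dma_stripe_setup() computes further down in this listing:
     * lower_mask = stripe_size8 - 1, upper_mask = ~lower_mask,
     * upper_shift = log2(n_targets), target_mask = n_targets - 1,
     * target_shift = log2(stripe_size8). */
    struct stripe_sketch {
        uint64_t lower_mask;
        uint64_t upper_mask;
        uint64_t upper_shift;
        uint64_t target_mask;
        uint64_t target_shift;
    };

    /* Which DMA channel (target) owns this byte address. */
    static unsigned int sketch_get_tgt(const struct stripe_sketch *s, uint64_t addr8)
    {
        return (addr8 >> s->target_shift) & s->target_mask;
    }

    /* Address within that channel: drop the target-index bits from the
     * upper part, keep the in-stripe offset untouched. */
    static uint64_t sketch_tgt_addr(const struct stripe_sketch *s, uint64_t addr8)
    {
        return ((addr8 >> s->upper_shift) & s->upper_mask) |
               (addr8 & s->lower_mask);
    }

    int main(void)
    {
        /* Hypothetical geometry: 4 KiB stripes across 8 targets. */
        struct stripe_sketch s = {
            .lower_mask   = 4096 - 1,
            .upper_mask   = ~(uint64_t)(4096 - 1),
            .upper_shift  = 3,   /* log2(8) */
            .target_mask  = 8 - 1,
            .target_shift = 12,  /* log2(4096) */
        };
        uint64_t addr8 = 0x12345678;

        printf("tgt=%u tgt_addr8=0x%llx\n",
               sketch_get_tgt(&s, addr8),
               (unsigned long long)sketch_tgt_addr(&s, addr8));
        return 0;
    }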
122 void rsxx_dma_queue_reset(struct rsxx_cardinfo *card) in rsxx_dma_queue_reset() argument
125 iowrite32(DMA_QUEUE_RESET, card->regmap + RESET); in rsxx_dma_queue_reset()
204 static void dma_intr_coal_auto_tune(struct rsxx_cardinfo *card) in dma_intr_coal_auto_tune() argument
210 if (card->config.data.intr_coal.mode != RSXX_INTR_COAL_AUTO_TUNE || in dma_intr_coal_auto_tune()
211 unlikely(card->eeh_state)) in dma_intr_coal_auto_tune()
214 for (i = 0; i < card->n_targets; i++) in dma_intr_coal_auto_tune()
215 q_depth += atomic_read(&card->ctrl[i].stats.hw_q_depth); in dma_intr_coal_auto_tune()
217 intr_coal = dma_intr_coal_val(card->config.data.intr_coal.mode, in dma_intr_coal_auto_tune()
219 card->config.data.intr_coal.latency); in dma_intr_coal_auto_tune()
220 iowrite32(intr_coal, card->regmap + INTR_COAL); in dma_intr_coal_auto_tune()
227 if (!pci_dma_mapping_error(ctrl->card->dev, dma->dma_addr)) { in rsxx_free_dma()
228 pci_unmap_page(ctrl->card->dev, dma->dma_addr, in rsxx_free_dma()
251 dma->cb(ctrl->card, dma->cb_data, status ? 1 : 0); in rsxx_complete_dma()
295 dev_dbg(CARD_TO_DEV(ctrl->card), in rsxx_handle_dma_error()
309 if (ctrl->card->scrub_hard) { in rsxx_handle_dma_error()
342 dev_err(CARD_TO_DEV(ctrl->card), in rsxx_handle_dma_error()
363 unlikely(ctrl->card->eeh_state)) in dma_engine_stalled()
371 dev_warn(CARD_TO_DEV(ctrl->card), in dma_engine_stalled()
377 dev_warn(CARD_TO_DEV(ctrl->card), in dma_engine_stalled()
380 ctrl->card->dma_fault = 1; in dma_engine_stalled()
390 dev_info(CARD_TO_DEV(ctrl->card), in dma_engine_stalled()
406 if (unlikely(ctrl->card->halt) || in rsxx_issue_dmas()
407 unlikely(ctrl->card->eeh_state)) in rsxx_issue_dmas()
433 if (unlikely(ctrl->card->dma_fault)) { in rsxx_issue_dmas()
455 dma->dma_addr = pci_map_page(ctrl->card->dev, dma->page, in rsxx_issue_dmas()
457 if (pci_dma_mapping_error(ctrl->card->dev, dma->dma_addr)) { in rsxx_issue_dmas()
478 dev_dbg(CARD_TO_DEV(ctrl->card), in rsxx_issue_dmas()
499 if (unlikely(ctrl->card->eeh_state)) { in rsxx_issue_dmas()
519 if (unlikely(ctrl->card->halt) || in rsxx_dma_done()
520 unlikely(ctrl->card->dma_fault) || in rsxx_dma_done()
521 unlikely(ctrl->card->eeh_state)) in rsxx_dma_done()
541 spin_lock_irqsave(&ctrl->card->irq_lock, flags); in rsxx_dma_done()
542 rsxx_disable_ier(ctrl->card, CR_INTR_DMA_ALL); in rsxx_dma_done()
543 spin_unlock_irqrestore(&ctrl->card->irq_lock, flags); in rsxx_dma_done()
545 dev_err(CARD_TO_DEV(ctrl->card), in rsxx_dma_done()
552 dev_dbg(CARD_TO_DEV(ctrl->card), in rsxx_dma_done()
577 dma_intr_coal_auto_tune(ctrl->card); in rsxx_dma_done()
582 spin_lock_irqsave(&ctrl->card->irq_lock, flags); in rsxx_dma_done()
583 rsxx_enable_ier(ctrl->card, CR_INTR_DMA(ctrl->id)); in rsxx_dma_done()
584 spin_unlock_irqrestore(&ctrl->card->irq_lock, flags); in rsxx_dma_done()
614 static int rsxx_queue_discard(struct rsxx_cardinfo *card, in rsxx_queue_discard() argument
636 dev_dbg(CARD_TO_DEV(card), "Queuing[D] laddr %x\n", dma->laddr); in rsxx_queue_discard()
643 static int rsxx_queue_dma(struct rsxx_cardinfo *card, in rsxx_queue_dma() argument
669 dev_dbg(CARD_TO_DEV(card), in rsxx_queue_dma()
680 int rsxx_dma_queue_bio(struct rsxx_cardinfo *card, in rsxx_dma_queue_bio() argument
703 for (i = 0; i < card->n_targets; i++) { in rsxx_dma_queue_bio()
712 tgt = rsxx_get_dma_tgt(card, addr8); in rsxx_dma_queue_bio()
713 laddr = rsxx_addr8_to_laddr(addr8, card); in rsxx_dma_queue_bio()
715 st = rsxx_queue_discard(card, &dma_list[tgt], laddr, in rsxx_dma_queue_bio()
731 tgt = rsxx_get_dma_tgt(card, addr8); in rsxx_dma_queue_bio()
732 laddr = rsxx_addr8_to_laddr(addr8, card); in rsxx_dma_queue_bio()
737 st = rsxx_queue_dma(card, &dma_list[tgt], in rsxx_dma_queue_bio()
754 for (i = 0; i < card->n_targets; i++) { in rsxx_dma_queue_bio()
756 spin_lock_bh(&card->ctrl[i].queue_lock); in rsxx_dma_queue_bio()
757 card->ctrl[i].stats.sw_q_depth += dma_cnt[i]; in rsxx_dma_queue_bio()
758 list_splice_tail(&dma_list[i], &card->ctrl[i].queue); in rsxx_dma_queue_bio()
759 spin_unlock_bh(&card->ctrl[i].queue_lock); in rsxx_dma_queue_bio()
761 queue_work(card->ctrl[i].issue_wq, in rsxx_dma_queue_bio()
762 &card->ctrl[i].issue_dma_work); in rsxx_dma_queue_bio()
769 for (i = 0; i < card->n_targets; i++) in rsxx_dma_queue_bio()
770 rsxx_cleanup_dma_queue(&card->ctrl[i], &dma_list[i], in rsxx_dma_queue_bio()
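The queueing path above fans one bio out across the per-target controllers: each piece gets a target from rsxx_get_dma_tgt(), is collected on that target's dma_list[], and the lists are then spliced onto the controllers' queues under queue_lock before each issue worker is kicked (or handed to rsxx_cleanup_dma_queue() on error). A simplified userspace sketch of the fan-out, assuming 512-byte hardware blocks and the same hypothetical 4 KiB x 8-target geometry as above; dma_cnt[] plays the same bookkeeping role as in the listing:

    #include <stdint.h>
    #include <stdio.h>

    #define N_TARGETS   8
    #define HW_BLK_SIZE 512     /* assumed hardware block size */

    /* Per rsxx_get_dma_tgt(): target_shift = log2(stripe size),
     * target_mask = n_targets - 1 (hypothetical 4 KiB stripes here). */
    static unsigned int get_tgt(uint64_t addr8)
    {
        return (addr8 >> 12) & (N_TARGETS - 1);
    }

    int main(void)
    {
        uint64_t addr8 = 0x10000;   /* start of a hypothetical request */
        unsigned int blocks = 32;   /* 16 KiB worth of hardware blocks */
        unsigned int dma_cnt[N_TARGETS] = { 0 };

        /* Build one per-target "list", as rsxx_dma_queue_bio() does. */
        for (unsigned int i = 0; i < blocks; i++) {
            unsigned int tgt = get_tgt(addr8);

            dma_cnt[tgt]++;             /* stand-in for list_add_tail() */
            addr8 += HW_BLK_SIZE;
        }

        /* Each controller's sw_q_depth then grows by its share and its
         * issue worker is woken (queue_work() in the listing). */
        for (unsigned int t = 0; t < N_TARGETS; t++)
            printf("target %u: %u DMAs queued\n", t, dma_cnt[t]);

        return 0;
    }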
863 static int rsxx_dma_stripe_setup(struct rsxx_cardinfo *card, in rsxx_dma_stripe_setup() argument
867 dev_err(CARD_TO_DEV(card), in rsxx_dma_stripe_setup()
872 card->_stripe.lower_mask = stripe_size8 - 1; in rsxx_dma_stripe_setup()
874 card->_stripe.upper_mask = ~(card->_stripe.lower_mask); in rsxx_dma_stripe_setup()
875 card->_stripe.upper_shift = ffs(card->n_targets) - 1; in rsxx_dma_stripe_setup()
877 card->_stripe.target_mask = card->n_targets - 1; in rsxx_dma_stripe_setup()
878 card->_stripe.target_shift = ffs(stripe_size8) - 1; in rsxx_dma_stripe_setup()
880 dev_dbg(CARD_TO_DEV(card), "_stripe.lower_mask = x%016llx\n", in rsxx_dma_stripe_setup()
881 card->_stripe.lower_mask); in rsxx_dma_stripe_setup()
882 dev_dbg(CARD_TO_DEV(card), "_stripe.upper_shift = x%016llx\n", in rsxx_dma_stripe_setup()
883 card->_stripe.upper_shift); in rsxx_dma_stripe_setup()
884 dev_dbg(CARD_TO_DEV(card), "_stripe.upper_mask = x%016llx\n", in rsxx_dma_stripe_setup()
885 card->_stripe.upper_mask); in rsxx_dma_stripe_setup()
886 dev_dbg(CARD_TO_DEV(card), "_stripe.target_mask = x%016llx\n", in rsxx_dma_stripe_setup()
887 card->_stripe.target_mask); in rsxx_dma_stripe_setup()
888 dev_dbg(CARD_TO_DEV(card), "_stripe.target_shift = x%016llx\n", in rsxx_dma_stripe_setup()
889 card->_stripe.target_shift); in rsxx_dma_stripe_setup()
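For concreteness, with a hypothetical stripe_size8 of 4096 bytes and 8 targets (the mask-and-shift arithmetic above only works when both are powers of two), the fields come out as:

    lower_mask   = 4096 - 1      = 0x0000000000000fff
    upper_mask   = ~lower_mask   = 0xfffffffffffff000
    upper_shift  = ffs(8) - 1    = 3
    target_mask  = 8 - 1         = 0x7
    target_shift = ffs(4096) - 1 = 12

so a byte address decodes, from the bottom up, into a 12-bit in-stripe offset, a 3-bit target index, and the remaining upper bits, matching the two helpers at the top of the listing.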
894 int rsxx_dma_configure(struct rsxx_cardinfo *card) in rsxx_dma_configure() argument
898 intr_coal = dma_intr_coal_val(card->config.data.intr_coal.mode, in rsxx_dma_configure()
899 card->config.data.intr_coal.count, in rsxx_dma_configure()
900 card->config.data.intr_coal.latency); in rsxx_dma_configure()
901 iowrite32(intr_coal, card->regmap + INTR_COAL); in rsxx_dma_configure()
903 return rsxx_dma_stripe_setup(card, card->config.data.stripe_size); in rsxx_dma_configure()
906 int rsxx_dma_setup(struct rsxx_cardinfo *card) in rsxx_dma_setup() argument
912 dev_info(CARD_TO_DEV(card), in rsxx_dma_setup()
914 card->n_targets); in rsxx_dma_setup()
917 for (i = 0; i < card->n_targets; i++) in rsxx_dma_setup()
918 card->ctrl[i].regmap = card->regmap + (i * 4096); in rsxx_dma_setup()
920 card->dma_fault = 0; in rsxx_dma_setup()
923 rsxx_dma_queue_reset(card); in rsxx_dma_setup()
926 for (i = 0; i < card->n_targets; i++) { in rsxx_dma_setup()
927 st = rsxx_dma_ctrl_init(card->dev, &card->ctrl[i]); in rsxx_dma_setup()
931 card->ctrl[i].card = card; in rsxx_dma_setup()
932 card->ctrl[i].id = i; in rsxx_dma_setup()
935 card->scrub_hard = 1; in rsxx_dma_setup()
937 if (card->config_valid) in rsxx_dma_setup()
938 rsxx_dma_configure(card); in rsxx_dma_setup()
941 for (i = 0; i < card->n_targets; i++) { in rsxx_dma_setup()
942 spin_lock_irqsave(&card->irq_lock, flags); in rsxx_dma_setup()
943 rsxx_enable_ier_and_isr(card, CR_INTR_DMA(i)); in rsxx_dma_setup()
944 spin_unlock_irqrestore(&card->irq_lock, flags); in rsxx_dma_setup()
950 for (i = 0; i < card->n_targets; i++) { in rsxx_dma_setup()
951 struct rsxx_dma_ctrl *ctrl = &card->ctrl[i]; in rsxx_dma_setup()
967 pci_free_consistent(card->dev, STATUS_BUFFER_SIZE8, in rsxx_dma_setup()
971 pci_free_consistent(card->dev, COMMAND_BUFFER_SIZE8, in rsxx_dma_setup()
998 void rsxx_dma_destroy(struct rsxx_cardinfo *card) in rsxx_dma_destroy() argument
1003 for (i = 0; i < card->n_targets; i++) { in rsxx_dma_destroy()
1004 ctrl = &card->ctrl[i]; in rsxx_dma_destroy()
1028 pci_free_consistent(card->dev, STATUS_BUFFER_SIZE8, in rsxx_dma_destroy()
1030 pci_free_consistent(card->dev, COMMAND_BUFFER_SIZE8, in rsxx_dma_destroy()
1035 int rsxx_eeh_save_issued_dmas(struct rsxx_cardinfo *card) in rsxx_eeh_save_issued_dmas() argument
1043 issued_dmas = kzalloc(sizeof(*issued_dmas) * card->n_targets, in rsxx_eeh_save_issued_dmas()
1048 for (i = 0; i < card->n_targets; i++) { in rsxx_eeh_save_issued_dmas()
1052 dma = get_tracker_dma(card->ctrl[i].trackers, j); in rsxx_eeh_save_issued_dmas()
1057 card->ctrl[i].stats.writes_issued--; in rsxx_eeh_save_issued_dmas()
1059 card->ctrl[i].stats.discards_issued--; in rsxx_eeh_save_issued_dmas()
1061 card->ctrl[i].stats.reads_issued--; in rsxx_eeh_save_issued_dmas()
1064 pci_unmap_page(card->dev, dma->dma_addr, in rsxx_eeh_save_issued_dmas()
1072 push_tracker(card->ctrl[i].trackers, j); in rsxx_eeh_save_issued_dmas()
1076 spin_lock_bh(&card->ctrl[i].queue_lock); in rsxx_eeh_save_issued_dmas()
1077 list_splice(&issued_dmas[i], &card->ctrl[i].queue); in rsxx_eeh_save_issued_dmas()
1079 atomic_sub(cnt, &card->ctrl[i].stats.hw_q_depth); in rsxx_eeh_save_issued_dmas()
1080 card->ctrl[i].stats.sw_q_depth += cnt; in rsxx_eeh_save_issued_dmas()
1081 card->ctrl[i].e_cnt = 0; in rsxx_eeh_save_issued_dmas()
1082 spin_unlock_bh(&card->ctrl[i].queue_lock); in rsxx_eeh_save_issued_dmas()
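Finally, the EEH path above drains every DMA still held by a hardware tracker slot back onto the controller's software queue so it can be reissued after the adapter recovers, and adjusts the hardware/software queue-depth counters to match. A stripped-down userspace model of that bookkeeping (slot count, types, and the singly linked queue are illustrative only, not the driver's structures):

    #include <stddef.h>
    #include <stdio.h>

    #define N_SLOTS 4   /* illustrative tracker-slot count */

    struct dma_item {
        unsigned int laddr;
        struct dma_item *next;
    };

    struct channel {
        struct dma_item *trackers[N_SLOTS]; /* DMAs issued to hardware */
        struct dma_item *sw_queue;          /* DMAs waiting to be issued */
        int hw_q_depth;
        int sw_q_depth;
    };

    /* Pull every issued DMA out of its tracker slot and put it back on
     * the software queue (list_splice() in the listing places the saved
     * DMAs at the front), then fix up the per-channel depth counters. */
    static void save_issued(struct channel *ch)
    {
        int cnt = 0;

        for (int j = 0; j < N_SLOTS; j++) {
            struct dma_item *dma = ch->trackers[j];

            if (!dma)
                continue;

            ch->trackers[j] = NULL;     /* push_tracker() analogue */
            dma->next = ch->sw_queue;
            ch->sw_queue = dma;
            cnt++;
        }

        ch->hw_q_depth -= cnt;
        ch->sw_q_depth += cnt;
    }

    int main(void)
    {
        struct dma_item a = { .laddr = 1 }, b = { .laddr = 2 };
        struct channel ch = { .trackers = { &a, NULL, &b, NULL },
                              .hw_q_depth = 2 };

        save_issued(&ch);
        printf("hw=%d sw=%d head=%u\n",
               ch.hw_q_depth, ch.sw_q_depth, ch.sw_queue->laddr);
        return 0;
    }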