Lines Matching refs:cqe
543 static inline int ehea_check_cqe(struct ehea_cqe *cqe, int *rq_num) in ehea_check_cqe() argument
545 *rq_num = (cqe->type & EHEA_CQE_TYPE_RQ) >> 5; in ehea_check_cqe()
546 if ((cqe->status & EHEA_CQE_STAT_ERR_MASK) == 0) in ehea_check_cqe()
548 if (((cqe->status & EHEA_CQE_STAT_ERR_TCP) != 0) && in ehea_check_cqe()
549 (cqe->header_length == 0)) in ehea_check_cqe()
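
The ehea_check_cqe() references above classify a receive completion: the receive-queue number is pulled out of the type field, a clear EHEA_CQE_STAT_ERR_MASK means the frame is usable, and a TCP checksum error on a frame with header_length == 0 is also let through. The return convention (0 for usable) is inferred from the if (!ehea_check_cqe(cqe, &rq)) call in ehea_proc_rwqes() further down the listing. The standalone sketch below mirrors only that decision flow; the DEMO_* bit values and the reduced struct are illustrative stand-ins, not the real definitions from ehea_qmr.h.

#include <stdint.h>
#include <stdio.h>

/* Placeholder bit definitions; the real values live in ehea_qmr.h.      */
#define DEMO_CQE_TYPE_RQ        0x60    /* assumed: RQ number in bits 5..6 */
#define DEMO_CQE_STAT_ERR_MASK  0x0f00  /* assumed: any error bit set      */
#define DEMO_CQE_STAT_ERR_TCP   0x0400  /* assumed: TCP checksum error bit */

struct demo_cqe {                       /* reduced stand-in for ehea_cqe   */
	uint8_t  type;
	uint16_t status;
	uint16_t header_length;
};

/* Mirrors the decision flow visible in the ehea_check_cqe() fragments:
 * report success when no error bit is set, or when the only problem is
 * a TCP checksum error on a frame with no parsed header.               */
static int demo_check_cqe(const struct demo_cqe *cqe, int *rq_num)
{
	*rq_num = (cqe->type & DEMO_CQE_TYPE_RQ) >> 5;
	if ((cqe->status & DEMO_CQE_STAT_ERR_MASK) == 0)
		return 0;
	if ((cqe->status & DEMO_CQE_STAT_ERR_TCP) && cqe->header_length == 0)
		return 0;
	return -1;
}

int main(void)
{
	struct demo_cqe cqe = { .type = 0x40, .status = 0 };
	int rq, rc;

	rc = demo_check_cqe(&cqe, &rq);
	printf("check=%d rq=%d\n", rc, rq);
	return 0;
}
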
555 struct sk_buff *skb, struct ehea_cqe *cqe, in ehea_fill_skb() argument
558 int length = cqe->num_bytes_transfered - 4; /*remove CRC */ in ehea_fill_skb()
565 if (cqe->status & EHEA_CQE_BLIND_CKSUM) { in ehea_fill_skb()
567 skb->csum = csum_unfold(~cqe->inet_checksum_value); in ehea_fill_skb()
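
ehea_fill_skb() trims the 4-byte CRC from the byte count reported in the CQE and, when the adapter could only produce a "blind" checksum over the frame (EHEA_CQE_BLIND_CKSUM), hands the complemented hardware value to the stack via csum_unfold(). The complement suggests the hardware reports the folded one's-complement checksum, while the kernel's CHECKSUM_COMPLETE convention wants the raw running sum in skb->csum. The userspace sketch below walks through that arithmetic; the payload bytes and the simplified RFC 1071 routine are illustrative only, not the kernel's csum helpers.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Simplified RFC 1071 one's-complement sum over a byte buffer. */
static uint16_t ones_sum16(const uint8_t *data, size_t len)
{
	uint32_t sum = 0;
	size_t i;

	for (i = 0; i + 1 < len; i += 2)
		sum += (uint32_t)((data[i] << 8) | data[i + 1]);
	if (len & 1)
		sum += (uint32_t)(data[len - 1] << 8);
	while (sum >> 16)
		sum = (sum & 0xffff) + (sum >> 16);
	return (uint16_t)sum;
}

int main(void)
{
	uint8_t payload[] = { 0x45, 0x00, 0x00, 0x3c, 0x1c, 0x46 };
	uint16_t sum = ones_sum16(payload, sizeof(payload));
	uint16_t reported = (uint16_t)~sum;    /* a "blind" folded checksum */

	/* Complementing the reported value recovers the running sum, which
	 * is the shape skb->csum needs under CHECKSUM_COMPLETE rules; the
	 * same trick as csum_unfold(~cqe->inet_checksum_value) above.     */
	printf("sum=0x%04x reported=0x%04x recovered=0x%04x\n",
	       (unsigned)sum, (unsigned)reported,
	       (unsigned)(uint16_t)~reported);
	return 0;
}
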
576 struct ehea_cqe *cqe) in get_skb_by_index() argument
578 int skb_index = EHEA_BMASK_GET(EHEA_WR_ID_INDEX, cqe->wr_id); in get_skb_by_index()
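
get_skb_by_index() recovers the array slot of a previously posted skb from the 64-bit wr_id carried in the completion; EHEA_WR_ID_INDEX is one of several fields packed into that id (EHEA_WR_ID_TYPE and EHEA_WR_ID_REFILL appear in ehea_proc_cqes() further down). Below is a minimal shift-and-mask sketch of that packing; the field positions are hypothetical and the real EHEA_BMASK_* helpers are defined in ehea.h.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical layout of the 64-bit work-request id; the real field
 * positions come from the EHEA_BMASK_* / EHEA_WR_ID_* definitions.   */
#define DEMO_WR_ID_TYPE_SHIFT   60
#define DEMO_WR_ID_TYPE_MASK    0xfULL
#define DEMO_WR_ID_INDEX_SHIFT  16
#define DEMO_WR_ID_INDEX_MASK   0xffffULL

/* Generic "get field" helper in the spirit of EHEA_BMASK_GET(). */
#define DEMO_GET(val, shift, mask)  (((val) >> (shift)) & (mask))

int main(void)
{
	uint64_t wr_id = (2ULL << DEMO_WR_ID_TYPE_SHIFT) |
			 (123ULL << DEMO_WR_ID_INDEX_SHIFT);

	/* get_skb_by_index() recovers skb_index the same way: shift the
	 * packed id and mask off the field of interest.                 */
	printf("type=%llu index=%llu\n",
	       (unsigned long long)DEMO_GET(wr_id, DEMO_WR_ID_TYPE_SHIFT,
					    DEMO_WR_ID_TYPE_MASK),
	       (unsigned long long)DEMO_GET(wr_id, DEMO_WR_ID_INDEX_SHIFT,
					    DEMO_WR_ID_INDEX_MASK));
	return 0;
}
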
629 struct ehea_cqe *cqe, int *processed_rq2, in ehea_treat_poll_error() argument
634 if (cqe->status & EHEA_CQE_STAT_ERR_TCP) in ehea_treat_poll_error()
636 if (cqe->status & EHEA_CQE_STAT_ERR_IP) in ehea_treat_poll_error()
638 if (cqe->status & EHEA_CQE_STAT_ERR_CRC) in ehea_treat_poll_error()
643 skb = get_skb_by_index(pr->rq2_skba.arr, pr->rq2_skba.len, cqe); in ehea_treat_poll_error()
647 skb = get_skb_by_index(pr->rq3_skba.arr, pr->rq3_skba.len, cqe); in ehea_treat_poll_error()
651 if (cqe->status & EHEA_CQE_STAT_FAT_ERR_MASK) { in ehea_treat_poll_error()
655 ehea_dump(cqe, sizeof(*cqe), "CQE"); in ehea_treat_poll_error()
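
ehea_treat_poll_error() does the bookkeeping for bad completions: it bumps a per-class counter based on the TCP/IP/CRC status bits, looks up (and presumably releases) the skb that was posted to RQ2 or RQ3, and, when a bit in EHEA_CQE_STAT_FAT_ERR_MASK is set, dumps the CQE and signals that the port needs a reset (the port_reset return value is visible at the call site in ehea_proc_rwqes()). A minimal sketch of that classification, with placeholder bit values, follows.

#include <stdint.h>
#include <stdio.h>

/* Placeholder status bits; the real masks are in ehea_qmr.h. */
#define DEMO_STAT_ERR_TCP       0x0400
#define DEMO_STAT_ERR_IP        0x0200
#define DEMO_STAT_ERR_CRC       0x0100
#define DEMO_STAT_FAT_ERR_MASK  0xf000

struct demo_err_stats {
	unsigned long tcp_err, ip_err, crc_err, fatal;
};

/* Mirrors the bookkeeping visible in ehea_treat_poll_error(): bump one
 * counter per error class and report (nonzero) when the error is fatal
 * enough that the port should be reset.                               */
static int demo_classify_error(uint16_t status, struct demo_err_stats *st)
{
	if (status & DEMO_STAT_ERR_TCP)
		st->tcp_err++;
	if (status & DEMO_STAT_ERR_IP)
		st->ip_err++;
	if (status & DEMO_STAT_ERR_CRC)
		st->crc_err++;
	if (status & DEMO_STAT_FAT_ERR_MASK) {
		st->fatal++;
		return 1;
	}
	return 0;
}

int main(void)
{
	struct demo_err_stats st = { 0 };
	int reset = demo_classify_error(DEMO_STAT_ERR_TCP, &st);

	printf("reset=%d tcp_err=%lu\n", reset, st.tcp_err);
	return 0;
}
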
670 struct ehea_cqe *cqe; in ehea_proc_rwqes() local
685 cqe = ehea_poll_rq1(qp, &wqe_index); in ehea_proc_rwqes()
686 while ((processed < budget) && cqe) { in ehea_proc_rwqes()
691 ehea_dump(cqe, sizeof(*cqe), "CQE"); in ehea_proc_rwqes()
695 if (!ehea_check_cqe(cqe, &rq)) { in ehea_proc_rwqes()
710 skb_copy_to_linear_data(skb, ((char *)cqe) + 64, in ehea_proc_rwqes()
711 cqe->num_bytes_transfered - 4); in ehea_proc_rwqes()
712 ehea_fill_skb(dev, skb, cqe, pr); in ehea_proc_rwqes()
716 skb_arr_rq2_len, cqe); in ehea_proc_rwqes()
722 ehea_fill_skb(dev, skb, cqe, pr); in ehea_proc_rwqes()
727 skb_arr_rq3_len, cqe); in ehea_proc_rwqes()
733 ehea_fill_skb(dev, skb, cqe, pr); in ehea_proc_rwqes()
739 if (cqe->status & EHEA_CQE_VLAN_TAG_XTRACT) in ehea_proc_rwqes()
741 cqe->vlan_tag); in ehea_proc_rwqes()
746 port_reset = ehea_treat_poll_error(pr, rq, cqe, in ehea_proc_rwqes()
752 cqe = ehea_poll_rq1(qp, &wqe_index); in ehea_proc_rwqes()
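
ehea_proc_rwqes() is the receive half of the poll: it keeps pulling completions from RQ1 while the NAPI budget allows, copies small RQ1 frames straight out of the CQE (the payload apparently starts 64 bytes into the entry, minus the 4-byte CRC), looks up RQ2/RQ3 skbs by index, attaches the extracted VLAN tag, and diverts bad completions to ehea_treat_poll_error(). The toy loop below reproduces only the control-flow skeleton, budget-bounded consumption from a poll source with errors handled out of line; the ring contents and helper names are invented for the demo.

#include <stddef.h>
#include <stdio.h>

/* Toy completion entry and a mock poll source standing in for
 * ehea_poll_rq1(); the real driver pulls entries off a hardware queue. */
struct demo_rx_cqe { int ok; int bytes; };

static struct demo_rx_cqe demo_ring[] = {
	{ 1, 64 }, { 1, 1500 }, { 0, 0 }, { 1, 128 }, { 1, 256 },
};

static struct demo_rx_cqe *demo_poll(size_t *idx)
{
	if (*idx >= sizeof(demo_ring) / sizeof(demo_ring[0]))
		return NULL;
	return &demo_ring[(*idx)++];
}

/* Control-flow skeleton of ehea_proc_rwqes(): keep consuming completions
 * until the queue is empty or the NAPI budget is spent, diverting bad
 * completions to an error path.                                        */
static int demo_proc_rwqes(int budget)
{
	size_t idx = 0;
	int processed = 0;
	struct demo_rx_cqe *cqe = demo_poll(&idx);

	while (processed < budget && cqe) {
		processed++;
		if (cqe->ok)
			printf("rx %d bytes\n", cqe->bytes);
		else
			printf("rx error, dropped\n");
		cqe = demo_poll(&idx);
	}
	return processed;
}

int main(void)
{
	printf("processed=%d\n", demo_proc_rwqes(4));
	return 0;
}
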
817 struct ehea_cqe *cqe; in ehea_proc_cqes() local
825 cqe = ehea_poll_cq(send_cq); in ehea_proc_cqes()
826 while (cqe && (quota > 0)) { in ehea_proc_cqes()
832 if (cqe->wr_id == SWQE_RESTART_CHECK) { in ehea_proc_cqes()
838 if (cqe->status & EHEA_CQE_STAT_ERR_MASK) { in ehea_proc_cqes()
840 cqe->status); in ehea_proc_cqes()
843 ehea_dump(cqe, sizeof(*cqe), "Send CQE"); in ehea_proc_cqes()
845 if (cqe->status & EHEA_CQE_STAT_RESET_MASK) { in ehea_proc_cqes()
853 ehea_dump(cqe, sizeof(*cqe), "CQE"); in ehea_proc_cqes()
855 if (likely(EHEA_BMASK_GET(EHEA_WR_ID_TYPE, cqe->wr_id) in ehea_proc_cqes()
858 index = EHEA_BMASK_GET(EHEA_WR_ID_INDEX, cqe->wr_id); in ehea_proc_cqes()
864 swqe_av += EHEA_BMASK_GET(EHEA_WR_ID_REFILL, cqe->wr_id); in ehea_proc_cqes()
867 cqe = ehea_poll_cq(send_cq); in ehea_proc_cqes()
884 return cqe; in ehea_proc_cqes()
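
ehea_proc_cqes() reaps send completions under a quota: it watches for the special SWQE_RESTART_CHECK wr_id, logs and optionally resets on an error status, decodes the slot index from EHEA_WR_ID_INDEX to release the right transmit buffer, and accumulates EHEA_WR_ID_REFILL into swqe_av, presumably so the transmit path can learn how many send slots came free. The sketch below shows that decode-and-accumulate pattern with a hypothetical wr_id layout; the real field positions come from the EHEA_WR_ID_* masks.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical wr_id layout: low 16 bits = send slot index, next 8 bits
 * = number of send slots this completion frees ("refill"). The real
 * layout comes from the EHEA_WR_ID_* bitmasks.                         */
#define DEMO_IDX(wr_id)     ((unsigned)((wr_id) & 0xffff))
#define DEMO_REFILL(wr_id)  ((unsigned)(((wr_id) >> 16) & 0xff))

int main(void)
{
	uint64_t completions[] = {
		(1u << 16) | 3,    /* frees one slot, completes index 3  */
		(2u << 16) | 7,    /* frees two slots, completes index 7 */
	};
	int quota = 2, swqe_av = 0;
	size_t i;

	/* ehea_proc_cqes() walks send completions in the same shape: stop
	 * when the quota is spent, decode the slot index from wr_id, and
	 * add up the freed slots in swqe_av for the transmit path.       */
	for (i = 0; i < sizeof(completions) / sizeof(completions[0]) &&
		    quota > 0; i++, quota--) {
		printf("completed send slot %u\n", DEMO_IDX(completions[i]));
		swqe_av += DEMO_REFILL(completions[i]);
	}
	printf("swqe_av=%d\n", swqe_av);
	return 0;
}
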
894 struct ehea_cqe *cqe; in ehea_poll() local
909 cqe = ehea_poll_rq1(pr->qp, &wqe_index); in ehea_poll()
912 if (!cqe && !cqe_skb) in ehea_poll()
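
The trailing ehea_poll() lines hint at the usual NAPI race check: once the receive and send queues look drained, ehea_poll_rq1() and the saved send CQE are consulted one more time, and polling only stops if both are still empty (if (!cqe && !cqe_skb)), so a completion that arrives while event generation is being re-armed is not lost. The toy below models that drain, re-arm, re-check pattern; the helper names and the simulated racing completion are invented for illustration.

#include <stdbool.h>
#include <stdio.h>

/* One completion will "arrive" while events are being re-armed, to
 * exercise the race that the final re-check exists to catch.         */
static int racing_completions = 1;

static void rearm_and_maybe_race(bool *pending)
{
	if (racing_completions) {
		racing_completions--;
		*pending = true;        /* work landed during the re-arm */
	}
}

int main(void)
{
	bool pending = true;

	while (pending) {
		pending = false;        /* drain the rx and tx queues    */
		printf("queues drained\n");

		rearm_and_maybe_race(&pending);

		/* Mirrors the final ehea_poll_rq1()/ehea_poll_cq() look in
		 * ehea_poll(): stop polling only if the re-check still finds
		 * both queues empty, otherwise go around again.            */
		if (pending)
			printf("completion raced in, polling again\n");
	}
	printf("poll complete\n");
	return 0;
}
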