sq_wqe 2196 drivers/infiniband/hw/hns/hns_roce_hw_v1.c  struct hns_roce_wqe_ctrl_seg *sq_wqe;
sq_wqe 2293 drivers/infiniband/hw/hns/hns_roce_hw_v1.c  sq_wqe = get_send_wqe(*cur_qp, roce_get_field(cqe->cqe_byte_4,
sq_wqe 2297 drivers/infiniband/hw/hns/hns_roce_hw_v1.c  switch (le32_to_cpu(sq_wqe->flag) & HNS_ROCE_WQE_OPCODE_MASK) {
sq_wqe 2318 drivers/infiniband/hw/hns/hns_roce_hw_v1.c  wc->wc_flags = (le32_to_cpu(sq_wqe->flag) & HNS_ROCE_WQE_IMM ?
sq_wqe  593 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  struct hinic_sq_wqe *sq_wqe, struct hinic_sge *sges,
sq_wqe  598 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  sq_prepare_ctrl(&sq_wqe->ctrl, prod_idx, nr_sges);
sq_wqe  600 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  sq_prepare_task(&sq_wqe->task);
sq_wqe  603 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  sq_wqe->buf_descs[i].sge = sges[i];
sq_wqe  664 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  return &hw_wqe->sq_wqe;
sq_wqe  686 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  struct hinic_sq_wqe *sq_wqe,
sq_wqe  689 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  struct hinic_hw_wqe *hw_wqe = (struct hinic_hw_wqe *)sq_wqe;
sq_wqe  694 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  hinic_cpu_to_be32(sq_wqe, wqe_size);
sq_wqe  714 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  struct hinic_sq_wqe *sq_wqe;
sq_wqe  726 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  sq_wqe = &hw_wqe->sq_wqe;
sq_wqe  727 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  ctrl = &sq_wqe->ctrl;
sq_wqe  731 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  *wqe_size = sizeof(*ctrl) + sizeof(sq_wqe->task);
sq_wqe  735 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  return &hw_wqe->sq_wqe;
sq_wqe  756 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  return &hw_wqe->sq_wqe;
sq_wqe  775 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  void hinic_sq_get_sges(struct hinic_sq_wqe *sq_wqe, struct hinic_sge *sges,
sq_wqe  781 drivers/net/ethernet/huawei/hinic/hinic_hw_qp.c  sges[i] = sq_wqe->buf_descs[i].sge;
sq_wqe  450 drivers/net/ethernet/huawei/hinic/hinic_hw_wqe.h  struct hinic_sq_wqe sq_wqe;
sq_wqe  468 drivers/net/ethernet/huawei/hinic/hinic_tx.c  struct hinic_sq_wqe *sq_wqe;
sq_wqe  503 drivers/net/ethernet/huawei/hinic/hinic_tx.c  sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);
sq_wqe  504 drivers/net/ethernet/huawei/hinic/hinic_tx.c  if (!sq_wqe) {
sq_wqe  510 drivers/net/ethernet/huawei/hinic/hinic_tx.c  sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx);
sq_wqe  511 drivers/net/ethernet/huawei/hinic/hinic_tx.c  if (sq_wqe) {
sq_wqe  527 drivers/net/ethernet/huawei/hinic/hinic_tx.c  hinic_sq_prepare_wqe(txq->sq, prod_idx, sq_wqe, txq->sges, nr_sges);
sq_wqe  529 drivers/net/ethernet/huawei/hinic/hinic_tx.c  err = hinic_tx_offload(skb, &sq_wqe->task, &sq_wqe->ctrl.queue_info);
sq_wqe  533 drivers/net/ethernet/huawei/hinic/hinic_tx.c  hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size);
sq_wqe  579 drivers/net/ethernet/huawei/hinic/hinic_tx.c  struct hinic_sq_wqe *sq_wqe;
sq_wqe  585 drivers/net/ethernet/huawei/hinic/hinic_tx.c  while ((sq_wqe = hinic_sq_read_wqebb(sq, &skb, &wqe_size, &ci))) {
sq_wqe  586 drivers/net/ethernet/huawei/hinic/hinic_tx.c  sq_wqe = hinic_sq_read_wqe(sq, &skb, wqe_size, &ci);
sq_wqe  587 drivers/net/ethernet/huawei/hinic/hinic_tx.c  if (!sq_wqe)
sq_wqe  592 drivers/net/ethernet/huawei/hinic/hinic_tx.c  hinic_sq_get_sges(sq_wqe, txq->free_sges, nr_sges);
sq_wqe  615 drivers/net/ethernet/huawei/hinic/hinic_tx.c  struct hinic_sq_wqe *sq_wqe;
sq_wqe  628 drivers/net/ethernet/huawei/hinic/hinic_tx.c  sq_wqe = hinic_sq_read_wqebb(sq, &skb, &wqe_size, &sw_ci);
sq_wqe  629 drivers/net/ethernet/huawei/hinic/hinic_tx.c  if ((!sq_wqe) ||
sq_wqe  637 drivers/net/ethernet/huawei/hinic/hinic_tx.c  sq_wqe = hinic_sq_read_wqe(sq, &skb, wqe_size, &sw_ci);
sq_wqe  638 drivers/net/ethernet/huawei/hinic/hinic_tx.c  if (unlikely(!sq_wqe))
sq_wqe  647 drivers/net/ethernet/huawei/hinic/hinic_tx.c  hinic_sq_get_sges(sq_wqe, txq->free_sges, nr_sges);
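Taken together, the hinic call sites above show the shape of a send-queue WQE: a control segment, a task (offload) segment, and an array of buffer descriptors each wrapping a scatter-gather entry; sq_prepare_wqe() fills them in on transmit and hinic_sq_get_sges() copies the SGEs back out on completion. The sketch below is a minimal, self-contained user-space illustration of that fill/read pattern only. The struct layouts and the helper names sq_prepare_wqe_sketch() and sq_get_sges_sketch() are simplified stand-ins invented for this example, not the real definitions from hinic_hw_wqe.h or hinic_hw_qp.c.

/*
 * Minimal sketch of the sq_wqe fill/read pattern seen in hinic_hw_qp.c.
 * All struct definitions here are simplified stand-ins for illustration,
 * not the driver's actual hardware layout.
 */
#include <stdint.h>
#include <stdio.h>

#define MAX_SGES 4

struct sge {                  /* simplified scatter-gather entry */
	uint64_t addr;
	uint32_t len;
};

struct sq_ctrl {              /* simplified control segment */
	uint32_t ctrl_info;
	uint32_t queue_info;
};

struct sq_task {              /* simplified task (offload) segment */
	uint32_t pkt_info;
};

struct sq_bufdesc {           /* one buffer descriptor per fragment */
	struct sge sge;
};

struct sq_wqe {               /* ctrl + task + per-fragment descriptors */
	struct sq_ctrl ctrl;
	struct sq_task task;
	struct sq_bufdesc buf_descs[MAX_SGES];
};

/* Mirrors sq_prepare_wqe(): fill ctrl and task, then copy one sge per fragment. */
static void sq_prepare_wqe_sketch(struct sq_wqe *wqe, uint16_t prod_idx,
				  const struct sge *sges, int nr_sges)
{
	wqe->ctrl.ctrl_info = prod_idx;   /* stand-in for sq_prepare_ctrl() */
	wqe->ctrl.queue_info = 0;
	wqe->task.pkt_info = 0;           /* stand-in for sq_prepare_task() */

	for (int i = 0; i < nr_sges; i++)
		wqe->buf_descs[i].sge = sges[i];
}

/* Mirrors hinic_sq_get_sges(): copy the sges back out when the WQE completes. */
static void sq_get_sges_sketch(const struct sq_wqe *wqe, struct sge *sges,
			       int nr_sges)
{
	for (int i = 0; i < nr_sges; i++)
		sges[i] = wqe->buf_descs[i].sge;
}

int main(void)
{
	struct sge in[2] = { { 0x1000, 64 }, { 0x2000, 128 } }, out[2];
	struct sq_wqe wqe;

	sq_prepare_wqe_sketch(&wqe, 7, in, 2);
	sq_get_sges_sketch(&wqe, out, 2);
	printf("sge0 len=%u, sge1 len=%u\n", out[0].len, out[1].len);
	return 0;
}

In the driver itself this pattern is bracketed by queue bookkeeping: hinic_sq_get_wqe() reserves space at the producer index before the fill, hinic_sq_write_wqe() commits the WQE (after hinic_cpu_to_be32() byte-swapping), and hinic_sq_read_wqebb()/hinic_sq_read_wqe() walk completed WQEs at the consumer index before hinic_sq_get_sges() releases the buffers.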