Lines Matching refs:cpu_to_be32
105 [IB_WR_SEND] = cpu_to_be32(MLX4_OPCODE_SEND),
106 [IB_WR_LSO] = cpu_to_be32(MLX4_OPCODE_LSO),
107 [IB_WR_SEND_WITH_IMM] = cpu_to_be32(MLX4_OPCODE_SEND_IMM),
108 [IB_WR_RDMA_WRITE] = cpu_to_be32(MLX4_OPCODE_RDMA_WRITE),
109 [IB_WR_RDMA_WRITE_WITH_IMM] = cpu_to_be32(MLX4_OPCODE_RDMA_WRITE_IMM),
110 [IB_WR_RDMA_READ] = cpu_to_be32(MLX4_OPCODE_RDMA_READ),
111 [IB_WR_ATOMIC_CMP_AND_SWP] = cpu_to_be32(MLX4_OPCODE_ATOMIC_CS),
112 [IB_WR_ATOMIC_FETCH_AND_ADD] = cpu_to_be32(MLX4_OPCODE_ATOMIC_FA),
113 [IB_WR_SEND_WITH_INV] = cpu_to_be32(MLX4_OPCODE_SEND_INVAL),
114 [IB_WR_LOCAL_INV] = cpu_to_be32(MLX4_OPCODE_LOCAL_INVAL),
115 [IB_WR_REG_MR] = cpu_to_be32(MLX4_OPCODE_FMR),
116 [IB_WR_MASKED_ATOMIC_CMP_AND_SWP] = cpu_to_be32(MLX4_OPCODE_MASKED_ATOMIC_CS),
117 [IB_WR_MASKED_ATOMIC_FETCH_AND_ADD] = cpu_to_be32(MLX4_OPCODE_MASKED_ATOMIC_FA),
118 [IB_WR_BIND_MW] = cpu_to_be32(MLX4_OPCODE_BIND_MW),
222 stamp = ind & qp->sq.wqe_cnt ? cpu_to_be32(0x7fffffff) : in stamp_send_wqe()
223 cpu_to_be32(0xffffffff); in stamp_send_wqe()
233 *wqe = cpu_to_be32(0xffffffff); in stamp_send_wqe()
252 av->port_pd = cpu_to_be32((qp->port << 24) | to_mpd(qp->ibqp.pd)->pdn); in post_nop_wqe()
259 inl->byte_count = cpu_to_be32(1 << 31 | (size - s - sizeof *inl)); in post_nop_wqe()
269 ctrl->owner_opcode = cpu_to_be32(MLX4_OPCODE_NOP | MLX4_WQE_CTRL_NEC) | in post_nop_wqe()
270 (n & qp->sq.wqe_cnt ? cpu_to_be32(1 << 31) : 0); in post_nop_wqe()
717 qp->sq_signal_bits = cpu_to_be32(MLX4_WQE_CTRL_CQ_UPDATE); in create_qp_common()
1279 return cpu_to_be32(hw_access_flags); in to_mlx4_access_flags()
1334 cpu_to_be32((ah->grh.traffic_class << 20) | in _mlx4_set_path()
1535 context->flags = cpu_to_be32((to_mlx4_state(new_state) << 28) | in __mlx4_ib_modify_qp()
1539 context->flags |= cpu_to_be32(MLX4_QP_PM_MIGRATED << 11); in __mlx4_ib_modify_qp()
1544 context->flags |= cpu_to_be32(MLX4_QP_PM_MIGRATED << 11); in __mlx4_ib_modify_qp()
1547 context->flags |= cpu_to_be32(MLX4_QP_PM_REARM << 11); in __mlx4_ib_modify_qp()
1550 context->flags |= cpu_to_be32(MLX4_QP_PM_ARMED << 11); in __mlx4_ib_modify_qp()
1588 context->xrcd = cpu_to_be32((u32) qp->xrcdn); in __mlx4_ib_modify_qp()
1590 context->param3 |= cpu_to_be32(1 << 30); in __mlx4_ib_modify_qp()
1594 context->usr_page = cpu_to_be32(to_mucontext(ibqp->uobject->context)->uar.index); in __mlx4_ib_modify_qp()
1596 context->usr_page = cpu_to_be32(dev->priv_uar.index); in __mlx4_ib_modify_qp()
1599 context->remote_qpn = cpu_to_be32(attr->dest_qp_num); in __mlx4_ib_modify_qp()
1705 context->pd = cpu_to_be32(pd->pdn); in __mlx4_ib_modify_qp()
1706 context->cqn_send = cpu_to_be32(send_cq->mcq.cqn); in __mlx4_ib_modify_qp()
1707 context->cqn_recv = cpu_to_be32(recv_cq->mcq.cqn); in __mlx4_ib_modify_qp()
1708 context->params1 = cpu_to_be32(MLX4_IB_ACK_REQ_FREQ << 28); in __mlx4_ib_modify_qp()
1712 context->params1 |= cpu_to_be32(1 << 11); in __mlx4_ib_modify_qp()
1715 context->params1 |= cpu_to_be32(attr->rnr_retry << 13); in __mlx4_ib_modify_qp()
1720 context->params1 |= cpu_to_be32(attr->retry_cnt << 16); in __mlx4_ib_modify_qp()
1727 cpu_to_be32(fls(attr->max_rd_atomic - 1) << 21); in __mlx4_ib_modify_qp()
1732 context->next_send_psn = cpu_to_be32(attr->sq_psn); in __mlx4_ib_modify_qp()
1737 cpu_to_be32(fls(attr->max_dest_rd_atomic - 1) << 21); in __mlx4_ib_modify_qp()
1747 context->params2 |= cpu_to_be32(MLX4_QP_BIT_RIC); in __mlx4_ib_modify_qp()
1750 context->rnr_nextrecvpsn |= cpu_to_be32(attr->min_rnr_timer << 24); in __mlx4_ib_modify_qp()
1754 context->rnr_nextrecvpsn |= cpu_to_be32(attr->rq_psn); in __mlx4_ib_modify_qp()
1760 context->qkey = cpu_to_be32(IB_QP_SET_QKEY); in __mlx4_ib_modify_qp()
1772 context->qkey = cpu_to_be32(attr->qkey); in __mlx4_ib_modify_qp()
1778 context->srqn = cpu_to_be32(1 << 24 | to_msrq(ibqp->srq)->msrq.srqn); in __mlx4_ib_modify_qp()
1825 if (!(context->flags & cpu_to_be32(1 << MLX4_RSS_QPC_FLAG_OFFSET))) in __mlx4_ib_modify_qp()
1826 context->srqn = cpu_to_be32(7 << 28); in __mlx4_ib_modify_qp()
1862 ctrl->owner_opcode = cpu_to_be32(1 << 31); in __mlx4_ib_modify_qp()
2182 mlx->flags &= cpu_to_be32(MLX4_WQE_CTRL_CQ_UPDATE); in build_sriov_qp0_header()
2185 mlx->flags |= cpu_to_be32(MLX4_WQE_MLX_VL15 | 0x1 | MLX4_WQE_MLX_SLR); in build_sriov_qp0_header()
2193 sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->remote_qpn); in build_sriov_qp0_header()
2196 cpu_to_be32(mdev->dev->caps.qp0_tunnel[sqp->qp.port - 1]); in build_sriov_qp0_header()
2198 sqp->ud_header.bth.psn = cpu_to_be32((sqp->send_psn++) & ((1 << 24) - 1)); in build_sriov_qp0_header()
2206 sqp->ud_header.deth.qkey = cpu_to_be32(qkey); in build_sriov_qp0_header()
2207 sqp->ud_header.deth.source_qpn = cpu_to_be32(sqp->qp.mqp.qpn); in build_sriov_qp0_header()
2223 inl->byte_count = cpu_to_be32(1 << 31 | header_size); in build_sriov_qp0_header()
2227 inl->byte_count = cpu_to_be32(1 << 31 | spc); in build_sriov_qp0_header()
2246 inl->byte_count = cpu_to_be32(1 << 31 | (header_size - spc)); in build_sriov_qp0_header()
2330 ah->av.ib.sl_tclass_flowlabel & cpu_to_be32(0xfffff); in build_mlx_header()
2355 mlx->flags &= cpu_to_be32(MLX4_WQE_CTRL_CQ_UPDATE); in build_mlx_header()
2358 mlx->flags |= cpu_to_be32((!sqp->qp.ibqp.qp_num ? MLX4_WQE_MLX_VL15 : 0) | in build_mlx_header()
2362 if (ah->av.ib.port_pd & cpu_to_be32(0x80000000)) in build_mlx_header()
2363 mlx->flags |= cpu_to_be32(0x1); /* force loopback */ in build_mlx_header()
2406 mlx->flags |= cpu_to_be32(MLX4_WQE_CTRL_FORCE_LOOPBACK); in build_mlx_header()
2424 sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->remote_qpn); in build_mlx_header()
2425 sqp->ud_header.bth.psn = cpu_to_be32((sqp->send_psn++) & ((1 << 24) - 1)); in build_mlx_header()
2426 sqp->ud_header.deth.qkey = cpu_to_be32(wr->remote_qkey & 0x80000000 ? in build_mlx_header()
2428 sqp->ud_header.deth.source_qpn = cpu_to_be32(sqp->qp.ibqp.qp_num); in build_mlx_header()
2454 inl->byte_count = cpu_to_be32(1 << 31 | header_size); in build_mlx_header()
2458 inl->byte_count = cpu_to_be32(1 << 31 | spc); in build_mlx_header()
2477 inl->byte_count = cpu_to_be32(1 << 31 | (header_size - spc)); in build_mlx_header()
2506 cpu_to_be32(MLX4_WQE_FMR_AND_BIND_PERM_ATOMIC) : 0) | in convert_access()
2508 cpu_to_be32(MLX4_WQE_FMR_AND_BIND_PERM_REMOTE_WRITE) : 0) | in convert_access()
2510 cpu_to_be32(MLX4_WQE_FMR_AND_BIND_PERM_REMOTE_READ) : 0) | in convert_access()
2511 (acc & IB_ACCESS_LOCAL_WRITE ? cpu_to_be32(MLX4_WQE_FMR_PERM_LOCAL_WRITE) : 0) | in convert_access()
2512 cpu_to_be32(MLX4_WQE_FMR_PERM_LOCAL_READ); in convert_access()
2521 fseg->mem_key = cpu_to_be32(wr->key); in set_reg_seg()
2526 fseg->page_size = cpu_to_be32(ilog2(mr->ibmr.page_size)); in set_reg_seg()
2536 cpu_to_be32(MLX4_WQE_FMR_AND_BIND_PERM_REMOTE_READ | in set_bind_seg()
2541 bseg->flags2 |= cpu_to_be32(MLX4_WQE_BIND_TYPE_2); in set_bind_seg()
2543 bseg->flags2 |= cpu_to_be32(MLX4_WQE_BIND_ZERO_BASED); in set_bind_seg()
2544 bseg->new_rkey = cpu_to_be32(wr->rkey); in set_bind_seg()
2545 bseg->lkey = cpu_to_be32(wr->bind_info.mr->lkey); in set_bind_seg()
2553 iseg->mem_key = cpu_to_be32(rkey); in set_local_inv_seg()
2560 rseg->rkey = cpu_to_be32(rkey); in set_raddr_seg()
2593 dseg->dqpn = cpu_to_be32(wr->remote_qpn); in set_datagram_seg()
2594 dseg->qkey = cpu_to_be32(wr->remote_qkey); in set_datagram_seg()
2609 sqp_av.port_pd = av->ib.port_pd | cpu_to_be32(0x80000000); in set_tunnel_datagram_seg()
2612 cpu_to_be32(0xf0000000); in set_tunnel_datagram_seg()
2616 dseg->dqpn = cpu_to_be32(dev->dev->caps.qp1_tunnel[port - 1]); in set_tunnel_datagram_seg()
2618 dseg->dqpn = cpu_to_be32(dev->dev->caps.qp0_tunnel[port - 1]); in set_tunnel_datagram_seg()
2620 dseg->qkey = cpu_to_be32(IB_QP_SET_QKEY); in set_tunnel_datagram_seg()
2632 hdr.remote_qpn = cpu_to_be32(wr->remote_qpn); in build_tunnel_header()
2634 hdr.qkey = cpu_to_be32(wr->remote_qkey); in build_tunnel_header()
2643 inl->byte_count = cpu_to_be32(1 << 31 | sizeof (hdr)); in build_tunnel_header()
2648 inl->byte_count = cpu_to_be32(1 << 31 | spc); in build_tunnel_header()
2653 inl->byte_count = cpu_to_be32(1 << 31 | (sizeof (hdr) - spc)); in build_tunnel_header()
2678 iseg->byte_count = cpu_to_be32((1 << 31) | 4); in set_mlx_icrc_seg()
2683 dseg->lkey = cpu_to_be32(sg->lkey); in set_data_seg()
2696 dseg->byte_count = cpu_to_be32(sg->length); in set_data_seg()
2701 dseg->byte_count = cpu_to_be32(sg->length); in __set_data_seg()
2702 dseg->lkey = cpu_to_be32(sg->lkey); in __set_data_seg()
2713 *blh = cpu_to_be32(1 << 6); in build_lso_seg()
2721 *lso_hdr_sz = cpu_to_be32(wr->mss << 16 | wr->hlen); in build_lso_seg()
2734 return cpu_to_be32(wr->ex.invalidate_rkey); in send_ieth()
2745 inl->byte_count = cpu_to_be32(1 << 31); in add_zero_len_inline()
2800 cpu_to_be32(MLX4_WQE_CTRL_CQ_UPDATE) : 0) | in mlx4_ib_post_send()
2802 cpu_to_be32(MLX4_WQE_CTRL_SOLICITED) : 0) | in mlx4_ib_post_send()
2804 cpu_to_be32(MLX4_WQE_CTRL_IP_CSUM | in mlx4_ib_post_send()
2856 cpu_to_be32(MLX4_WQE_CTRL_STRONG_ORDER); in mlx4_ib_post_send()
2864 cpu_to_be32(MLX4_WQE_CTRL_STRONG_ORDER); in mlx4_ib_post_send()
2872 cpu_to_be32(MLX4_WQE_CTRL_STRONG_ORDER); in mlx4_ib_post_send()
2898 *(__be32 *) wqe |= cpu_to_be32(0x80000000); in mlx4_ib_post_send()
3017 (ind & qp->sq.wqe_cnt ? cpu_to_be32(1 << 31) : 0) | blh; in mlx4_ib_post_send()
3114 cpu_to_be32(sizeof (struct mlx4_ib_proxy_sqp_hdr)); in mlx4_ib_post_recv()
3116 scat->lkey = cpu_to_be32(wr->sg_list->lkey); in mlx4_ib_post_recv()
3127 scat[i].lkey = cpu_to_be32(MLX4_INVALID_LKEY); in mlx4_ib_post_recv()
3146 *qp->db.db = cpu_to_be32(qp->rq.head & 0xffff); in mlx4_ib_post_recv()
3324 qp->sq_signal_bits == cpu_to_be32(MLX4_WQE_CTRL_CQ_UPDATE) ? in mlx4_ib_query_qp()
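
The matches above all follow one pattern: every multi-byte word written into a WQE, QP context, or UD header is converted to big-endian with cpu_to_be32() before the hardware reads it, and flag bits (the ownership bit 1 << 31 in owner_opcode, MLX4_WQE_CTRL_* control flags, the inline-segment marker in byte_count) are OR-ed together in already-converted form. Below is a minimal userspace sketch of that idiom, not driver code: htobe32() from <endian.h> stands in for the kernel's cpu_to_be32(), and the FAKE_* constants are made-up placeholders, not the real MLX4_* values. It mirrors the shape of the post_nop_wqe() matches at lines 269-270.

/* Illustrative sketch only. htobe32() plays the role of cpu_to_be32();
 * the opcode/flag values are placeholders, not the MLX4_* definitions. */
#define _DEFAULT_SOURCE
#include <endian.h>
#include <stdint.h>
#include <stdio.h>

#define FAKE_OPCODE_NOP  0x00u       /* placeholder opcode value            */
#define FAKE_CTRL_NEC    (1u << 29)  /* placeholder control flag            */
#define OWNER_BIT        (1u << 31)  /* ownership bit, as in owner_opcode   */

int main(void)
{
	unsigned int n = 70, wqe_cnt = 64;  /* example producer index and ring size */
	uint32_t owner_opcode;
	unsigned char *p;

	/* Convert the opcode+flags word to big-endian once, then OR in the
	 * ownership bit, itself already converted, depending on which pass
	 * over the ring the index represents - the same shape as
	 * "cpu_to_be32(MLX4_OPCODE_NOP | MLX4_WQE_CTRL_NEC) |
	 *  (n & qp->sq.wqe_cnt ? cpu_to_be32(1 << 31) : 0)" above. */
	owner_opcode = htobe32(FAKE_OPCODE_NOP | FAKE_CTRL_NEC) |
		       ((n & wqe_cnt) ? htobe32(OWNER_BIT) : 0);

	/* The bytes now sit in memory in big-endian order regardless of the
	 * host CPU, which is what the device expects to read. */
	p = (unsigned char *)&owner_opcode;
	printf("bytes as the device would read them: %02x %02x %02x %02x\n",
	       p[0], p[1], p[2], p[3]);
	return 0;
}

Because the conversion happens per field at assignment time, comparisons and mask tests against those fields are done with constants that are themselves converted (e.g. "context->flags & cpu_to_be32(1 << MLX4_RSS_QPC_FLAG_OFFSET)" and "qp->sq_signal_bits == cpu_to_be32(MLX4_WQE_CTRL_CQ_UPDATE)" above), which keeps the stored values in wire order and avoids any byte swapping on the hot path.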