tx_ctx            448 drivers/infiniband/sw/siw/siw.h 	struct siw_iwarp_tx tx_ctx; /* Transmit context */
tx_ctx            492 drivers/infiniband/sw/siw/siw.h #define tx_qp(tx) container_of(tx, struct siw_qp, tx_ctx)
tx_ctx            493 drivers/infiniband/sw/siw/siw.h #define tx_wqe(qp) (&(qp)->tx_ctx.wqe_active)
tx_ctx            377 drivers/infiniband/sw/siw/siw_cm.c 	qp->tx_ctx.tx_suspend = 1;
tx_ctx            755 drivers/infiniband/sw/siw/siw_cm.c 		qp->tx_ctx.gso_seg_limit = 0;
tx_ctx           1301 drivers/infiniband/sw/siw/siw_cm.c 			cep->qp->tx_ctx.tx_suspend = 1;
tx_ctx           1606 drivers/infiniband/sw/siw/siw_cm.c 		qp->tx_ctx.gso_seg_limit = 0;
tx_ctx            134 drivers/infiniband/sw/siw/siw_qp.c 	qp->tx_ctx.tx_suspend = 1;
tx_ctx            229 drivers/infiniband/sw/siw/siw_qp.c 	struct siw_iwarp_tx *c_tx = &qp->tx_ctx;
tx_ctx            582 drivers/infiniband/sw/siw/siw_qp.c 	if (qp->tx_ctx.mpa_crc_hd) {
tx_ctx            583 drivers/infiniband/sw/siw/siw_qp.c 		crypto_shash_init(qp->tx_ctx.mpa_crc_hd);
tx_ctx            584 drivers/infiniband/sw/siw/siw_qp.c 		if (crypto_shash_update(qp->tx_ctx.mpa_crc_hd,
tx_ctx            590 drivers/infiniband/sw/siw/siw_qp.c 			if (crypto_shash_update(qp->tx_ctx.mpa_crc_hd,
tx_ctx            595 drivers/infiniband/sw/siw/siw_qp.c 		crypto_shash_final(qp->tx_ctx.mpa_crc_hd, (u8 *)&crc);
tx_ctx            659 drivers/infiniband/sw/siw/siw_qp.c 		qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_SEND] = 0;
tx_ctx            660 drivers/infiniband/sw/siw/siw_qp.c 		qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_RDMA_READ] = 0;
tx_ctx            661 drivers/infiniband/sw/siw/siw_qp.c 		qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_TERMINATE] = 0;
tx_ctx            975 drivers/infiniband/sw/siw/siw_qp.c 				qp->tx_ctx.orq_fence = 1;
tx_ctx            997 drivers/infiniband/sw/siw/siw_qp.c 				qp->tx_ctx.orq_fence = 1;
tx_ctx           1152 drivers/infiniband/sw/siw/siw_qp_rx.c 	if (qp->tx_ctx.orq_fence) {
tx_ctx           1171 drivers/infiniband/sw/siw/siw_qp_rx.c 			qp->tx_ctx.orq_fence = 0;
tx_ctx           1175 drivers/infiniband/sw/siw/siw_qp_rx.c 			qp->tx_ctx.orq_fence = 0;
tx_ctx            695 drivers/infiniband/sw/siw/siw_qp_tx.c 	struct siw_iwarp_tx *c_tx = &qp->tx_ctx;
tx_ctx            783 drivers/infiniband/sw/siw/siw_qp_tx.c 	struct siw_iwarp_tx *c_tx = &qp->tx_ctx;
tx_ctx            785 drivers/infiniband/sw/siw/siw_qp_tx.c 	int rv = 0, burst_len = qp->tx_ctx.burst;
tx_ctx            905 drivers/infiniband/sw/siw/siw_qp_tx.c 	qp->tx_ctx.burst = burst_len;
tx_ctx           1025 drivers/infiniband/sw/siw/siw_qp_tx.c 	if (unlikely(qp->tx_ctx.tx_suspend)) {
tx_ctx           1083 drivers/infiniband/sw/siw/siw_qp_tx.c 			   qp->tx_ctx.ctrl_sent, qp->tx_ctx.ctrl_len,
tx_ctx           1084 drivers/infiniband/sw/siw/siw_qp_tx.c 			   qp->tx_ctx.bytes_unsent);
tx_ctx           1120 drivers/infiniband/sw/siw/siw_qp_tx.c 		if (!qp->tx_ctx.tx_suspend)
tx_ctx           1165 drivers/infiniband/sw/siw/siw_qp_tx.c 			   !qp->tx_ctx.tx_suspend)) {
tx_ctx           1173 drivers/infiniband/sw/siw/siw_qp_tx.c 				if (!qp->tx_ctx.tx_suspend)
tx_ctx            448 drivers/infiniband/sw/siw/siw_verbs.c 	qp->tx_ctx.gso_seg_limit = 1;
tx_ctx            449 drivers/infiniband/sw/siw/siw_verbs.c 	qp->tx_ctx.zcopy_tx = zcopy_tx;
tx_ctx            588 drivers/infiniband/sw/siw/siw_verbs.c 			qp->tx_ctx.tx_suspend = 1;
tx_ctx            637 drivers/infiniband/sw/siw/siw_verbs.c 	kfree(qp->tx_ctx.mpa_crc_hd);
tx_ctx            947 drivers/infiniband/sw/siw/siw_verbs.c 		qp->tx_ctx.in_syscall = 1;
tx_ctx            949 drivers/infiniband/sw/siw/siw_verbs.c 		if (siw_qp_sq_process(qp) != 0 && !(qp->tx_ctx.tx_suspend))
tx_ctx            952 drivers/infiniband/sw/siw/siw_verbs.c 		qp->tx_ctx.in_syscall = 0;
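
The siw hits above show the transmit context embedded as a field of the queue pair (siw.h:448) and the tx_qp() macro recovering the owning QP from a pointer to that embedded context via container_of (siw.h:492). Below is a minimal, self-contained sketch of that pattern; the struct layouts are simplified stand-ins, not the real driver definitions.

/* Illustrative sketch of the container_of pattern behind siw's tx_qp().
 * The struct contents here are mocks; only the embedding/recovery pattern
 * mirrors drivers/infiniband/sw/siw/siw.h.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct siw_iwarp_tx {			/* mock transmit context */
	int tx_suspend;
	int burst;
};

struct siw_qp {				/* mock QP embedding the TX context */
	int qp_num;
	struct siw_iwarp_tx tx_ctx;
};

#define tx_qp(tx) container_of(tx, struct siw_qp, tx_ctx)

int main(void)
{
	struct siw_qp qp = { .qp_num = 7 };
	struct siw_iwarp_tx *c_tx = &qp.tx_ctx;	/* as in siw_qp_tx.c */

	/* Recover the owning QP from the embedded context pointer. */
	printf("qp_num = %d\n", tx_qp(c_tx)->qp_num);
	return 0;
}
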
tx_ctx           3136 drivers/net/ethernet/intel/i40e/i40e_main.c 	struct i40e_hmc_obj_txq tx_ctx;
tx_ctx           3155 drivers/net/ethernet/intel/i40e/i40e_main.c 	memset(&tx_ctx, 0, sizeof(tx_ctx));
tx_ctx           3157 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.new_context = 1;
tx_ctx           3158 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.base = (ring->dma / 128);
tx_ctx           3159 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.qlen = ring->count;
tx_ctx           3160 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.fd_ena = !!(vsi->back->flags & (I40E_FLAG_FD_SB_ENABLED |
tx_ctx           3162 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.timesync_ena = !!(vsi->back->flags & I40E_FLAG_PTP);
tx_ctx           3165 drivers/net/ethernet/intel/i40e/i40e_main.c 		tx_ctx.head_wb_ena = 1;
tx_ctx           3166 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.head_wb_addr = ring->dma +
tx_ctx           3181 drivers/net/ethernet/intel/i40e/i40e_main.c 		tx_ctx.rdylist =
tx_ctx           3185 drivers/net/ethernet/intel/i40e/i40e_main.c 		tx_ctx.rdylist = le16_to_cpu(vsi->info.qs_handle[ring->dcb_tc]);
tx_ctx           3187 drivers/net/ethernet/intel/i40e/i40e_main.c 	tx_ctx.rdylist_act = 0;
tx_ctx           3199 drivers/net/ethernet/intel/i40e/i40e_main.c 	err = i40e_set_lan_tx_queue_context(hw, pf_q, &tx_ctx);
tx_ctx            549 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	struct i40e_hmc_obj_txq tx_ctx;
tx_ctx            567 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	memset(&tx_ctx, 0, sizeof(struct i40e_hmc_obj_txq));
tx_ctx            570 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	tx_ctx.base = info->dma_ring_addr / 128;
tx_ctx            571 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	tx_ctx.qlen = info->ring_len;
tx_ctx            572 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	tx_ctx.rdylist = le16_to_cpu(vsi->info.qs_handle[0]);
tx_ctx            573 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	tx_ctx.rdylist_act = 0;
tx_ctx            574 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	tx_ctx.head_wb_ena = info->headwb_enabled;
tx_ctx            575 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	tx_ctx.head_wb_addr = info->dma_headwb_addr;
tx_ctx            588 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	ret = i40e_set_lan_tx_queue_context(hw, pf_queue_id, &tx_ctx);
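
Both i40e call sites (i40e_main.c and i40e_virtchnl_pf.c) follow the same flow: zero a struct i40e_hmc_obj_txq on the stack, fill the base (ring DMA address in 128-byte units), qlen, rdylist and head-writeback fields, then program the context with i40e_set_lan_tx_queue_context(). The sketch below mirrors that flow; the struct and the set-context function are simplified mocks with hypothetical values, not the real i40e HMC API.

/* Sketch of the i40e TX queue context fill-and-program pattern.
 * i40e_hmc_obj_txq and the set-context call are mocked stand-ins.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct i40e_hmc_obj_txq {		/* mock: only the fields used above */
	uint8_t  new_context;
	uint64_t base;			/* ring base in 128-byte units */
	uint16_t qlen;			/* number of descriptors */
	uint16_t rdylist;		/* queue set handle */
	uint8_t  rdylist_act;
	uint8_t  head_wb_ena;		/* head write-back enable */
	uint64_t head_wb_addr;
};

/* Mock stand-in for i40e_set_lan_tx_queue_context(hw, pf_q, &tx_ctx). */
static int set_lan_tx_queue_context_mock(int pf_q, struct i40e_hmc_obj_txq *ctx)
{
	printf("q%d: base=%llu qlen=%u rdylist=%u head_wb=%d\n",
	       pf_q, (unsigned long long)ctx->base, (unsigned)ctx->qlen,
	       (unsigned)ctx->rdylist, ctx->head_wb_ena);
	return 0;
}

int main(void)
{
	struct i40e_hmc_obj_txq tx_ctx;
	uint64_t ring_dma = 0x1000000;	/* hypothetical descriptor ring DMA address */

	memset(&tx_ctx, 0, sizeof(tx_ctx));	/* clear before filling */
	tx_ctx.new_context = 1;
	tx_ctx.base = ring_dma / 128;		/* hardware expects 128-byte units */
	tx_ctx.qlen = 512;
	tx_ctx.rdylist = 0;			/* qs_handle for the traffic class */
	tx_ctx.rdylist_act = 0;
	tx_ctx.head_wb_ena = 1;
	tx_ctx.head_wb_addr = ring_dma + 512 * 16; /* assumed: just past the ring */

	return set_lan_tx_queue_context_mock(0, &tx_ctx);
}
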
tx_ctx             46 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	struct tls_offload_context_tx *tx_ctx;
tx_ctx             55 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	struct tls_offload_context_tx         tx_ctx;
tx_ctx             63 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	struct tls_offload_context_tx *tx_ctx = tls_offload_ctx_tx(tls_ctx);
tx_ctx             68 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	shadow = (struct mlx5e_ktls_offload_context_tx_shadow *)tx_ctx;
tx_ctx             71 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	priv_tx->tx_ctx = tx_ctx;
tx_ctx             77 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	struct tls_offload_context_tx *tx_ctx = tls_offload_ctx_tx(tls_ctx);
tx_ctx             82 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls.h 	shadow = (struct mlx5e_ktls_offload_context_tx_shadow *)tx_ctx;
tx_ctx            198 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	struct tls_offload_context_tx *tx_ctx = priv_tx->tx_ctx;
tx_ctx            205 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	spin_lock_irqsave(&tx_ctx->lock, flags);
tx_ctx            206 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	record = tls_get_record(tx_ctx, tcp_seq, &info->rcd_sn);
tx_ctx            245 drivers/net/ethernet/mellanox/mlx5/core/en_accel/ktls_tx.c 	spin_unlock_irqrestore(&tx_ctx->lock, flags);
tx_ctx            130 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls.c 		struct mlx5e_tls_offload_context_tx *tx_ctx =
tx_ctx            133 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls.c 		tx_ctx->swid = htonl(swid);
tx_ctx            134 drivers/net/ethernet/mellanox/mlx5/core/en_accel/tls.c 		tx_ctx->expected_seq = start_offload_tcp_sn;
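
In the mlx5 hits, tx_ctx points at the TLS TX offload context; ktls_tx.c takes its lock, looks up the TLS record covering a TCP sequence number with tls_get_record(), and releases the lock, while tls.c stores the software ID and expected sequence number. The sketch below shows only the locked record-lookup pattern; the context struct, the lock, and tls_get_record() are simplified userspace stand-ins.

/* Sketch of the locked TLS-record lookup pattern from ktls_tx.c.
 * All types and helpers here are mocks of the kernel TLS offload API.
 */
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

struct tls_record_info {		/* mock: one in-flight TLS record */
	uint32_t start_seq;		/* first TCP sequence it covers */
	uint32_t end_seq;		/* one past the last covered sequence */
	uint64_t rcd_sn;		/* TLS record sequence number */
};

struct tls_offload_context_tx {		/* mock TX offload context */
	pthread_mutex_t lock;		/* stands in for the kernel spinlock */
	struct tls_record_info recs[4];
	int nr_recs;
};

/* Mock of tls_get_record(): find the record covering tcp_seq. */
static struct tls_record_info *get_record_mock(struct tls_offload_context_tx *tx_ctx,
					       uint32_t tcp_seq, uint64_t *rcd_sn)
{
	for (int i = 0; i < tx_ctx->nr_recs; i++) {
		struct tls_record_info *rec = &tx_ctx->recs[i];

		if (tcp_seq >= rec->start_seq && tcp_seq < rec->end_seq) {
			*rcd_sn = rec->rcd_sn;
			return rec;
		}
	}
	return NULL;
}

int main(void)
{
	struct tls_offload_context_tx ctx = {
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.recs = { { 1000, 2000, 5 }, { 2000, 3000, 6 } },
		.nr_recs = 2,
	};
	uint64_t rcd_sn;

	/* Lock, look up the record for a resync sequence number, unlock. */
	pthread_mutex_lock(&ctx.lock);
	struct tls_record_info *rec = get_record_mock(&ctx, 2100, &rcd_sn);
	pthread_mutex_unlock(&ctx.lock);

	if (rec)
		printf("record sn %llu covers seq 2100\n",
		       (unsigned long long)rcd_sn);
	return 0;
}
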
tx_ctx           2238 net/tls/tls_sw.c 	struct tls_sw_context_tx *tx_ctx = tls_sw_ctx_tx(ctx);
tx_ctx           2241 net/tls/tls_sw.c 	if (is_tx_ready(tx_ctx) &&
tx_ctx           2242 net/tls/tls_sw.c 	    !test_and_set_bit(BIT_TX_SCHEDULED, &tx_ctx->tx_bitmask))
tx_ctx           2243 net/tls/tls_sw.c 		schedule_delayed_work(&tx_ctx->tx_work.work, 0);
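
The tls_sw.c hits guard the TX worker with an atomic test-and-set on BIT_TX_SCHEDULED before calling schedule_delayed_work(), so the work is queued at most once. A small sketch of that guard, assuming C11 atomics as a stand-in for test_and_set_bit() and a plain function in place of the work queue:

/* Sketch of the "schedule the TX worker at most once" guard from tls_sw.c.
 * test_and_set_bit() and schedule_delayed_work() are mocked here.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define BIT_TX_SCHEDULED 0

struct tls_sw_context_tx {		/* mock TX software context */
	atomic_ulong tx_bitmask;
};

/* Mock of test_and_set_bit(): returns the bit's previous value. */
static bool test_and_set_bit_mock(int nr, atomic_ulong *addr)
{
	unsigned long old = atomic_fetch_or(addr, 1UL << nr);

	return old & (1UL << nr);
}

static void schedule_tx_work_mock(void)
{
	puts("tx work scheduled");
}

int main(void)
{
	struct tls_sw_context_tx tx_ctx = { .tx_bitmask = 0 };

	/* First caller wins and schedules the work... */
	if (!test_and_set_bit_mock(BIT_TX_SCHEDULED, &tx_ctx.tx_bitmask))
		schedule_tx_work_mock();

	/* ...a second caller sees the bit already set and does nothing. */
	if (!test_and_set_bit_mock(BIT_TX_SCHEDULED, &tx_ctx.tx_bitmask))
		schedule_tx_work_mock();

	return 0;
}
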
tx_ctx             35 net/wireless/lib80211_crypt_wep.c 	struct arc4_ctx tx_ctx;
tx_ctx            138 net/wireless/lib80211_crypt_wep.c 	arc4_setkey(&wep->tx_ctx, key, klen);
tx_ctx            139 net/wireless/lib80211_crypt_wep.c 	arc4_crypt(&wep->tx_ctx, pos, pos, len + 4);
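
The lib80211 WEP hits key a per-direction arc4 (RC4) context and then encrypt the payload plus the 4-byte ICV in place. The sketch below reimplements a minimal RC4 as a userspace stand-in for the kernel's lib/crypto arc4; the key and frame contents are hypothetical.

/* Sketch of the WEP TX path: key the RC4 TX context, encrypt in place.
 * arc4_setkey()/arc4_crypt() here are a minimal stand-in implementation.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

struct arc4_ctx {
	uint8_t S[256];
	uint8_t i, j;
};

static void arc4_setkey(struct arc4_ctx *ctx, const uint8_t *key, size_t klen)
{
	for (int n = 0; n < 256; n++)
		ctx->S[n] = n;
	for (int n = 0, j = 0; n < 256; n++) {
		j = (j + ctx->S[n] + key[n % klen]) & 0xff;
		uint8_t tmp = ctx->S[n];
		ctx->S[n] = ctx->S[j];
		ctx->S[j] = tmp;
	}
	ctx->i = ctx->j = 0;
}

static void arc4_crypt(struct arc4_ctx *ctx, uint8_t *out, const uint8_t *in,
		       size_t len)
{
	for (size_t n = 0; n < len; n++) {
		ctx->i = (ctx->i + 1) & 0xff;
		ctx->j = (ctx->j + ctx->S[ctx->i]) & 0xff;
		uint8_t tmp = ctx->S[ctx->i];
		ctx->S[ctx->i] = ctx->S[ctx->j];
		ctx->S[ctx->j] = tmp;
		out[n] = in[n] ^ ctx->S[(ctx->S[ctx->i] + ctx->S[ctx->j]) & 0xff];
	}
}

int main(void)
{
	struct arc4_ctx tx_ctx;
	/* Per-packet key: 3-byte IV followed by the WEP key (hypothetical). */
	uint8_t key[] = { 0x01, 0x02, 0x03, 'k', 'e', 'y', '1', '2' };
	uint8_t frame[] = "payload" "\x12\x34\x56\x78";	/* data + 4-byte ICV */

	arc4_setkey(&tx_ctx, key, sizeof(key));
	arc4_crypt(&tx_ctx, frame, frame, sizeof(frame) - 1); /* encrypt in place */

	printf("first ciphertext byte: 0x%02x\n", frame[0]);
	return 0;
}
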