txmsg             855 drivers/gpu/drm/drm_dp_mst_topology.c 			      struct drm_dp_sideband_msg_tx *txmsg)
txmsg             864 drivers/gpu/drm/drm_dp_mst_topology.c 	state = READ_ONCE(txmsg->state);
txmsg             870 drivers/gpu/drm/drm_dp_mst_topology.c 				    struct drm_dp_sideband_msg_tx *txmsg)
txmsg             876 drivers/gpu/drm/drm_dp_mst_topology.c 				 check_txmsg_state(mgr, txmsg),
txmsg             880 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->state == DRM_DP_SIDEBAND_TX_TIMEOUT) {
txmsg             885 drivers/gpu/drm/drm_dp_mst_topology.c 		DRM_DEBUG_KMS("timedout msg send %p %d %d\n", txmsg, txmsg->state, txmsg->seqno);
txmsg             891 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->state == DRM_DP_SIDEBAND_TX_QUEUED ||
txmsg             892 drivers/gpu/drm/drm_dp_mst_topology.c 		    txmsg->state == DRM_DP_SIDEBAND_TX_START_SEND) {
txmsg             893 drivers/gpu/drm/drm_dp_mst_topology.c 			list_del(&txmsg->next);
txmsg             896 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->state == DRM_DP_SIDEBAND_TX_START_SEND ||
txmsg             897 drivers/gpu/drm/drm_dp_mst_topology.c 		    txmsg->state == DRM_DP_SIDEBAND_TX_SENT) {
txmsg             898 drivers/gpu/drm/drm_dp_mst_topology.c 			mstb->tx_slots[txmsg->seqno] = NULL;
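
The hits at 855-898 index check_txmsg_state() and drm_dp_mst_wait_tx_reply(): a waiter sleeps until its message leaves the tx state machine, and on timeout it unlinks the message from whatever still owns it. A condensed sketch of that cleanup, reconstructed from the lines above (the wait primitive, timeout length and -EIO return are assumptions from this kernel era, not shown in the listing):

    ret = wait_event_timeout(mgr->tx_waitq,
                             check_txmsg_state(mgr, txmsg),
                             4 * HZ);                  /* timeout length assumed */
    mutex_lock(&mgr->qlock);
    if (ret > 0 && txmsg->state == DRM_DP_SIDEBAND_TX_TIMEOUT) {
        ret = -EIO;                                    /* worker timed it out first */
    } else if (ret == 0) {
        ret = -EIO;                                    /* assumed error code */
        /* never fully sent: drop it from the down queue */
        if (txmsg->state == DRM_DP_SIDEBAND_TX_QUEUED ||
            txmsg->state == DRM_DP_SIDEBAND_TX_START_SEND)
            list_del(&txmsg->next);
        /* already (partly) on the wire: release its seqno slot */
        if (txmsg->state == DRM_DP_SIDEBAND_TX_START_SEND ||
            txmsg->state == DRM_DP_SIDEBAND_TX_SENT)
            mstb->tx_slots[txmsg->seqno] = NULL;
    }
    mutex_unlock(&mgr->qlock);
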
txmsg            1957 drivers/gpu/drm/drm_dp_mst_topology.c 				  struct drm_dp_sideband_msg_tx *txmsg)
txmsg            1959 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_mst_branch *mstb = txmsg->dst;
txmsg            1963 drivers/gpu/drm/drm_dp_mst_topology.c 	if (txmsg->seqno == -1) {
txmsg            1969 drivers/gpu/drm/drm_dp_mst_topology.c 			txmsg->seqno = mstb->last_seqno;
txmsg            1972 drivers/gpu/drm/drm_dp_mst_topology.c 			txmsg->seqno = 0;
txmsg            1974 drivers/gpu/drm/drm_dp_mst_topology.c 			txmsg->seqno = 1;
txmsg            1975 drivers/gpu/drm/drm_dp_mst_topology.c 		mstb->tx_slots[txmsg->seqno] = txmsg;
txmsg            1978 drivers/gpu/drm/drm_dp_mst_topology.c 	req_type = txmsg->msg[0] & 0x7f;
txmsg            1984 drivers/gpu/drm/drm_dp_mst_topology.c 	hdr->path_msg = txmsg->path_msg;
txmsg            1989 drivers/gpu/drm/drm_dp_mst_topology.c 	hdr->seqno = txmsg->seqno;
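
set_hdr_from_dst_qlock() first claims one of the two per-branch tx_slots (sideband sequence numbers 0 and 1) and then fills the header from the message. A minimal sketch of the slot selection behind lines 1963-1975; the both-slots-busy error and the alternating last_seqno behaviour are assumptions:

    if (txmsg->seqno == -1) {
        if (mstb->tx_slots[0] && mstb->tx_slots[1])
            return -EAGAIN;                     /* assumed: both seqnos in flight */
        if (!mstb->tx_slots[0] && !mstb->tx_slots[1]) {
            txmsg->seqno = mstb->last_seqno;
            mstb->last_seqno ^= 1;              /* assumed: alternate 0/1 */
        } else if (!mstb->tx_slots[0]) {
            txmsg->seqno = 0;
        } else {
            txmsg->seqno = 1;
        }
        mstb->tx_slots[txmsg->seqno] = txmsg;   /* claim the slot */
    }
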
txmsg            1996 drivers/gpu/drm/drm_dp_mst_topology.c 				   struct drm_dp_sideband_msg_tx *txmsg,
txmsg            2006 drivers/gpu/drm/drm_dp_mst_topology.c 	if (txmsg->state == DRM_DP_SIDEBAND_TX_QUEUED) {
txmsg            2007 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg->seqno = -1;
txmsg            2008 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg->state = DRM_DP_SIDEBAND_TX_START_SEND;
txmsg            2013 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = set_hdr_from_dst_qlock(&hdr, txmsg);
txmsg            2018 drivers/gpu/drm/drm_dp_mst_topology.c 	len = txmsg->cur_len - txmsg->cur_offset;
txmsg            2024 drivers/gpu/drm/drm_dp_mst_topology.c 	if (len == txmsg->cur_len)
txmsg            2032 drivers/gpu/drm/drm_dp_mst_topology.c 	memcpy(&chunk[idx], &txmsg->msg[txmsg->cur_offset], tosend);
txmsg            2043 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->cur_offset += tosend;
txmsg            2044 drivers/gpu/drm/drm_dp_mst_topology.c 	if (txmsg->cur_offset == txmsg->cur_len) {
txmsg            2045 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg->state = DRM_DP_SIDEBAND_TX_SENT;
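
process_single_tx_qlock() streams the encoded request out in header-prefixed chunks, advancing cur_offset until cur_len is exhausted. A sketch of the chunking; the 48-byte sideband transaction size and the somt/eomt start/end-of-message markers are assumptions consistent with the DP MST spec:

    len = txmsg->cur_len - txmsg->cur_offset;          /* bytes still to send */
    space = 48 - 1 - drm_dp_calc_sb_hdr_size(&hdr);    /* chunk budget, assumed */
    tosend = min(len, space);
    hdr.somt = (txmsg->cur_offset == 0);               /* start of message transaction */
    hdr.eomt = (txmsg->cur_offset + tosend == txmsg->cur_len);

    memcpy(&chunk[idx], &txmsg->msg[txmsg->cur_offset], tosend);

    txmsg->cur_offset += tosend;
    if (txmsg->cur_offset == txmsg->cur_len)
        txmsg->state = DRM_DP_SIDEBAND_TX_SENT;        /* now await the reply */
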
txmsg            2053 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2062 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = list_first_entry(&mgr->tx_msg_downq, struct drm_dp_sideband_msg_tx, next);
txmsg            2063 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = process_single_tx_qlock(mgr, txmsg, false);
txmsg            2066 drivers/gpu/drm/drm_dp_mst_topology.c 		list_del(&txmsg->next);
txmsg            2069 drivers/gpu/drm/drm_dp_mst_topology.c 		list_del(&txmsg->next);
txmsg            2070 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->seqno != -1)
txmsg            2071 drivers/gpu/drm/drm_dp_mst_topology.c 			txmsg->dst->tx_slots[txmsg->seqno] = NULL;
txmsg            2072 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg->state = DRM_DP_SIDEBAND_TX_TIMEOUT;
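
The 2053-2072 hits are the down-queue worker: it pops the head of tx_msg_downq and pushes one chunk; a return of 1 means the message went out fully and now lives only in its tx_slot, while any other error unlinks it, frees the slot, and marks it TIMEOUT for the waiter. Sketch; only the wake_up_all() is an assumption beyond the listed lines:

    txmsg = list_first_entry(&mgr->tx_msg_downq,
                             struct drm_dp_sideband_msg_tx, next);
    ret = process_single_tx_qlock(mgr, txmsg, false);
    if (ret == 1) {
        list_del(&txmsg->next);     /* fully sent; the reply finds it via tx_slots */
    } else if (ret) {
        list_del(&txmsg->next);
        if (txmsg->seqno != -1)
            txmsg->dst->tx_slots[txmsg->seqno] = NULL;
        txmsg->state = DRM_DP_SIDEBAND_TX_TIMEOUT;
        wake_up_all(&mgr->tx_waitq);    /* assumed: fail the waiter immediately */
    }
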
txmsg            2079 drivers/gpu/drm/drm_dp_mst_topology.c 				       struct drm_dp_sideband_msg_tx *txmsg)
txmsg            2084 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = process_single_tx_qlock(mgr, txmsg, true);
txmsg            2089 drivers/gpu/drm/drm_dp_mst_topology.c 	if (txmsg->seqno != -1) {
txmsg            2090 drivers/gpu/drm/drm_dp_mst_topology.c 		WARN_ON((unsigned int)txmsg->seqno >
txmsg            2091 drivers/gpu/drm/drm_dp_mst_topology.c 			ARRAY_SIZE(txmsg->dst->tx_slots));
txmsg            2092 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg->dst->tx_slots[txmsg->seqno] = NULL;
txmsg            2097 drivers/gpu/drm/drm_dp_mst_topology.c 				 struct drm_dp_sideband_msg_tx *txmsg)
txmsg            2100 drivers/gpu/drm/drm_dp_mst_topology.c 	list_add_tail(&txmsg->next, &mgr->tx_msg_downq);
txmsg            2110 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2113 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2114 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg)
txmsg            2117 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = mstb;
txmsg            2118 drivers/gpu/drm/drm_dp_mst_topology.c 	len = build_link_address(txmsg);
txmsg            2121 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            2123 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
txmsg            2127 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK) {
txmsg            2130 drivers/gpu/drm/drm_dp_mst_topology.c 			DRM_DEBUG_KMS("link address reply: %d\n", txmsg->reply.u.link_addr.nports);
txmsg            2131 drivers/gpu/drm/drm_dp_mst_topology.c 			for (i = 0; i < txmsg->reply.u.link_addr.nports; i++) {
txmsg            2133 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].input_port,
txmsg            2134 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].peer_device_type,
txmsg            2135 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].port_number,
txmsg            2136 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].dpcd_revision,
txmsg            2137 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].mcs,
txmsg            2138 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].ddps,
txmsg            2139 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].legacy_device_plug_status,
txmsg            2140 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].num_sdp_streams,
txmsg            2141 drivers/gpu/drm/drm_dp_mst_topology.c 				       txmsg->reply.u.link_addr.ports[i].num_sdp_stream_sinks);
txmsg            2144 drivers/gpu/drm/drm_dp_mst_topology.c 			drm_dp_check_mstb_guid(mstb, txmsg->reply.u.link_addr.guid);
txmsg            2146 drivers/gpu/drm/drm_dp_mst_topology.c 			for (i = 0; i < txmsg->reply.u.link_addr.nports; i++) {
txmsg            2147 drivers/gpu/drm/drm_dp_mst_topology.c 				drm_dp_add_port(mstb, mgr->dev, &txmsg->reply.u.link_addr.ports[i]);
txmsg            2156 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
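
drm_dp_send_link_address() is the template every down-request sender in this file follows, and the enum_path_resources, allocate_payload, power_updown_phy and remote DPCD read/write hits below repeat it almost verbatim: kzalloc the txmsg, aim it at the destination branch, encode the request, queue it, sleep for the reply, check for a NAK, free. Condensed sketch of the lifecycle (error handling trimmed):

    txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
    if (!txmsg)
        return -ENOMEM;

    txmsg->dst = mstb;                  /* destination branch device */
    build_link_address(txmsg);          /* encode the sideband request body */
    drm_dp_queue_down_tx(mgr, txmsg);   /* hand off to the tx machinery */

    ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
    if (ret > 0 && txmsg->reply.reply_type != DP_SIDEBAND_REPLY_NAK) {
        /* ACK: walk txmsg->reply.u.link_addr.ports[] as in the lines above */
    }
    kfree(txmsg);
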
txmsg            2164 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2167 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2168 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg)
txmsg            2171 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = mstb;
txmsg            2172 drivers/gpu/drm/drm_dp_mst_topology.c 	len = build_enum_path_resources(txmsg, port->port_num);
txmsg            2174 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            2176 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
txmsg            2178 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK) {
txmsg            2181 drivers/gpu/drm/drm_dp_mst_topology.c 			if (port->port_num != txmsg->reply.u.path_resources.port_number)
txmsg            2183 drivers/gpu/drm/drm_dp_mst_topology.c 			DRM_DEBUG_KMS("enum path resources %d: %d %d\n", txmsg->reply.u.path_resources.port_number, txmsg->reply.u.path_resources.full_payload_bw_number,
txmsg            2184 drivers/gpu/drm/drm_dp_mst_topology.c 			       txmsg->reply.u.path_resources.avail_payload_bw_number);
txmsg            2185 drivers/gpu/drm/drm_dp_mst_topology.c 			port->available_pbn = txmsg->reply.u.path_resources.avail_payload_bw_number;
txmsg            2189 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
txmsg            2247 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2264 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2265 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg) {
txmsg            2273 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = mstb;
txmsg            2274 drivers/gpu/drm/drm_dp_mst_topology.c 	len = build_allocate_payload(txmsg, port_num,
txmsg            2278 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            2288 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
txmsg            2290 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK)
txmsg            2295 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
txmsg            2304 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2311 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2312 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg) {
txmsg            2317 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = port->parent;
txmsg            2318 drivers/gpu/drm/drm_dp_mst_topology.c 	len = build_power_updown_phy(txmsg, port->port_num, power_up);
txmsg            2319 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            2321 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(port->parent, txmsg);
txmsg            2323 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK)
txmsg            2328 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
txmsg            2542 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2549 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2550 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg) {
txmsg            2555 drivers/gpu/drm/drm_dp_mst_topology.c 	len = build_dpcd_read(txmsg, port->port_num, offset, size);
txmsg            2556 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = port->parent;
txmsg            2558 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            2560 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
txmsg            2565 drivers/gpu/drm/drm_dp_mst_topology.c 	if (txmsg->reply.reply_type == 1) {
txmsg            2572 drivers/gpu/drm/drm_dp_mst_topology.c 	if (txmsg->reply.u.remote_dpcd_read_ack.num_bytes != size) {
txmsg            2577 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = min_t(size_t, txmsg->reply.u.remote_dpcd_read_ack.num_bytes,
txmsg            2579 drivers/gpu/drm/drm_dp_mst_topology.c 	memcpy(bytes, txmsg->reply.u.remote_dpcd_read_ack.bytes, ret);
txmsg            2582 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
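
The remote DPCD read tail validates the ack twice: reply_type == 1 is the raw NAK value spelled DP_SIDEBAND_REPLY_NAK elsewhere in this listing, a short ack is rejected outright, and the copy length is still clamped with min_t() as a final guard. Sketch of lines 2565-2579 with assumed error codes:

    if (txmsg->reply.reply_type == 1) { /* 1 == NAK, cf. DP_SIDEBAND_REPLY_NAK */
        ret = -EIO;                     /* assumed error code */
        goto fail_free;
    }
    if (txmsg->reply.u.remote_dpcd_read_ack.num_bytes != size) {
        ret = -EPROTO;                  /* assumed error code */
        goto fail_free;
    }
    ret = min_t(size_t, txmsg->reply.u.remote_dpcd_read_ack.num_bytes, size);
    memcpy(bytes, txmsg->reply.u.remote_dpcd_read_ack.bytes, ret);
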
txmsg            2595 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2602 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2603 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg) {
txmsg            2608 drivers/gpu/drm/drm_dp_mst_topology.c 	len = build_dpcd_write(txmsg, port->port_num, offset, size, bytes);
txmsg            2609 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = mstb;
txmsg            2611 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            2613 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
txmsg            2615 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK)
txmsg            2620 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
txmsg            2640 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2642 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            2643 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg)
txmsg            2646 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = mstb;
txmsg            2647 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->seqno = seqno;
txmsg            2648 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_encode_up_ack_reply(txmsg, req_type);
txmsg            2652 drivers/gpu/drm/drm_dp_mst_topology.c 	process_single_up_tx_qlock(mgr, txmsg);
txmsg            2656 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
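
Up-reply acks break the down-request pattern: the seqno is not allocated from tx_slots but echoes the incoming up request, and the message is pushed straight through process_single_up_tx_qlock() with no reply wait, so it can be freed immediately. Sketch; the qlock critical section is an assumption from the _qlock suffix:

    txmsg->dst = mstb;
    txmsg->seqno = seqno;               /* echo the up request's seqno */
    drm_dp_encode_up_ack_reply(txmsg, req_type);

    mutex_lock(&mgr->qlock);            /* assumed locking */
    process_single_up_tx_qlock(mgr, txmsg);
    mutex_unlock(&mgr->qlock);

    kfree(txmsg);                       /* nothing waits on this message */
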
txmsg            2909 drivers/gpu/drm/drm_dp_mst_topology.c 		struct drm_dp_sideband_msg_tx *txmsg;
txmsg            2925 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg = mstb->tx_slots[slot];
txmsg            2929 drivers/gpu/drm/drm_dp_mst_topology.c 		if (!txmsg) {
txmsg            2941 drivers/gpu/drm/drm_dp_mst_topology.c 		drm_dp_sideband_parse_reply(&mgr->down_rep_recv, &txmsg->reply);
txmsg            2943 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK)
txmsg            2945 drivers/gpu/drm/drm_dp_mst_topology.c 				      txmsg->reply.req_type,
txmsg            2946 drivers/gpu/drm/drm_dp_mst_topology.c 				      drm_dp_mst_req_type_str(txmsg->reply.req_type),
txmsg            2947 drivers/gpu/drm/drm_dp_mst_topology.c 				      txmsg->reply.u.nak.reason,
txmsg            2948 drivers/gpu/drm/drm_dp_mst_topology.c 				      drm_dp_mst_nak_reason_str(txmsg->reply.u.nak.reason),
txmsg            2949 drivers/gpu/drm/drm_dp_mst_topology.c 				      txmsg->reply.u.nak.nak_data);
txmsg            2955 drivers/gpu/drm/drm_dp_mst_topology.c 		txmsg->state = DRM_DP_SIDEBAND_TX_RX;
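
The down-reply handler at 2909-2955 is the consumer end of tx_slots: the seqno in the received header selects the slot, the reply is parsed into the waiting txmsg, NAKs are logged, and the state flips to TX_RX. Sketch of the hand-off; clearing the slot and waking the waiters are assumptions from the surrounding code:

    txmsg = mstb->tx_slots[slot];       /* matched by the header's seqno */
    if (!txmsg)
        goto out;                       /* stale or unsolicited reply */

    drm_dp_sideband_parse_reply(&mgr->down_rep_recv, &txmsg->reply);
    txmsg->state = DRM_DP_SIDEBAND_TX_RX;
    mstb->tx_slots[slot] = NULL;        /* assumed: slot released here */
    wake_up_all(&mgr->tx_waitq);        /* assumed: completes drm_dp_mst_wait_tx_reply() */
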
txmsg            4047 drivers/gpu/drm/drm_dp_mst_topology.c 	struct drm_dp_sideband_msg_tx *txmsg = NULL;
txmsg            4073 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg = kzalloc(sizeof(*txmsg), GFP_KERNEL);
txmsg            4074 drivers/gpu/drm/drm_dp_mst_topology.c 	if (!txmsg) {
txmsg            4079 drivers/gpu/drm/drm_dp_mst_topology.c 	txmsg->dst = mstb;
txmsg            4080 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_encode_sideband_req(&msg, txmsg);
txmsg            4082 drivers/gpu/drm/drm_dp_mst_topology.c 	drm_dp_queue_down_tx(mgr, txmsg);
txmsg            4084 drivers/gpu/drm/drm_dp_mst_topology.c 	ret = drm_dp_mst_wait_tx_reply(mstb, txmsg);
txmsg            4087 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK) {
txmsg            4091 drivers/gpu/drm/drm_dp_mst_topology.c 		if (txmsg->reply.u.remote_i2c_read_ack.num_bytes != msgs[num - 1].len) {
txmsg            4095 drivers/gpu/drm/drm_dp_mst_topology.c 		memcpy(msgs[num - 1].buf, txmsg->reply.u.remote_i2c_read_ack.bytes, msgs[num - 1].len);
txmsg            4099 drivers/gpu/drm/drm_dp_mst_topology.c 	kfree(txmsg);
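
For I2C-over-sideband reads the reply check is stricter than the DPCD path: the ack must carry exactly the requested byte count or the transfer fails, since i2c clients cannot transparently handle short reads. Sketch of the tail, with the error codes and the i2c-style return value assumed:

    if (txmsg->reply.reply_type == DP_SIDEBAND_REPLY_NAK) {
        ret = -EREMOTEIO;               /* assumed error code */
        goto out;
    }
    if (txmsg->reply.u.remote_i2c_read_ack.num_bytes != msgs[num - 1].len) {
        ret = -EIO;                     /* assumed: short read is fatal */
        goto out;
    }
    memcpy(msgs[num - 1].buf, txmsg->reply.u.remote_i2c_read_ack.bytes,
           msgs[num - 1].len);
    ret = num;                          /* assumed: i2c convention, msgs completed */
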
txmsg             320 drivers/hsi/clients/cmt_speech.c 	struct hsi_msg *txmsg, *rxmsg;
txmsg             332 drivers/hsi/clients/cmt_speech.c 	txmsg = hsi_alloc_msg(1, GFP_KERNEL);
txmsg             333 drivers/hsi/clients/cmt_speech.c 	if (!txmsg) {
txmsg             337 drivers/hsi/clients/cmt_speech.c 	txmsg->channel = cs_char_data.channel_id_data;
txmsg             338 drivers/hsi/clients/cmt_speech.c 	txmsg->destructor = cs_hsi_data_destructor;
txmsg             339 drivers/hsi/clients/cmt_speech.c 	txmsg->context = hi;
txmsg             342 drivers/hsi/clients/cmt_speech.c 	hi->data_tx_msg = txmsg;
txmsg             694 drivers/hsi/clients/cmt_speech.c 	struct hsi_msg *txmsg;
txmsg             718 drivers/hsi/clients/cmt_speech.c 	txmsg = hi->data_tx_msg;
txmsg             719 drivers/hsi/clients/cmt_speech.c 	sg_init_one(txmsg->sgt.sgl, address, hi->buf_size);
txmsg             720 drivers/hsi/clients/cmt_speech.c 	txmsg->complete = cs_hsi_write_on_data_complete;
txmsg             721 drivers/hsi/clients/cmt_speech.c 	ret = hsi_async_write(hi->cl, txmsg);
txmsg             723 drivers/hsi/clients/cmt_speech.c 		cs_hsi_data_write_error(hi, txmsg);
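
The cmt_speech driver allocates its data txmsg once at setup time, caches it on the instance, and reuses it for every write by re-aiming its single scatterlist entry; only the completion callback and buffer change per transfer. Condensed sketch of the two sites listed above (error paths simplified):

    /* one-time setup */
    txmsg = hsi_alloc_msg(1, GFP_KERNEL);       /* one sg entry */
    if (!txmsg)
        goto out;                               /* assumed error path */
    txmsg->channel = cs_char_data.channel_id_data;
    txmsg->destructor = cs_hsi_data_destructor;
    txmsg->context = hi;
    hi->data_tx_msg = txmsg;

    /* per-write reuse */
    txmsg = hi->data_tx_msg;
    sg_init_one(txmsg->sgt.sgl, address, hi->buf_size);
    txmsg->complete = cs_hsi_write_on_data_complete;
    ret = hsi_async_write(hi->cl, txmsg);
    if (ret)
        cs_hsi_data_write_error(hi, txmsg);
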
txmsg             710 drivers/s390/net/netiucv.c 	struct iucv_message txmsg;
txmsg             770 drivers/s390/net/netiucv.c 	txmsg.class = 0;
txmsg             771 drivers/s390/net/netiucv.c 	txmsg.tag = 0;
txmsg             772 drivers/s390/net/netiucv.c 	rc = iucv_message_send(conn->path, &txmsg, 0, 0,
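
netiucv is the simplest IUCV sender here: a stack-allocated iucv_message with class and tag zeroed, passed to iucv_message_send() from the connection's transmit path. The payload arguments below are assumptions for the call's elided continuation:

    struct iucv_message txmsg;

    txmsg.class = 0;
    txmsg.tag = 0;
    rc = iucv_message_send(conn->path, &txmsg, 0, 0,
                           skb->data, skb->len);    /* payload args assumed */
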
txmsg            1068 net/iucv/af_iucv.c 	struct iucv_message txmsg = {0};
txmsg            1103 net/iucv/af_iucv.c 	txmsg.class = 0;
txmsg            1129 net/iucv/af_iucv.c 			memcpy(&txmsg.class,
txmsg            1186 net/iucv/af_iucv.c 	txmsg.tag = iucv->send_tag++;
txmsg            1187 net/iucv/af_iucv.c 	IUCV_SKB_CB(skb)->tag = txmsg.tag;
txmsg            1191 net/iucv/af_iucv.c 		err = afiucv_hs_send(&txmsg, sk, skb, 0);
txmsg            1201 net/iucv/af_iucv.c 			err = iucv_send_iprm(iucv->path, &txmsg, skb);
txmsg            1232 net/iucv/af_iucv.c 			err = pr_iucv->message_send(iucv->path, &txmsg,
txmsg            1236 net/iucv/af_iucv.c 			err = pr_iucv->message_send(iucv->path, &txmsg,
txmsg            1557 net/iucv/af_iucv.c 	struct iucv_message txmsg;
txmsg            1579 net/iucv/af_iucv.c 			txmsg.class = 0;
txmsg            1580 net/iucv/af_iucv.c 			txmsg.tag = 0;
txmsg            1581 net/iucv/af_iucv.c 			err = pr_iucv->message_send(iucv->path, &txmsg,
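
af_iucv does the most with the message descriptor: sendmsg() can copy ancillary data into txmsg.class, stamps every skb with an incrementing tag so the send-completion handler can match it back, and then dispatches to one of three transmit paths; the shutdown hits at 1557-1581 reuse the same zeroed-message pattern for a final notification. A condensed sketch with the branch conditions assumed:

    struct iucv_message txmsg = {0};

    txmsg.tag = iucv->send_tag++;       /* matches completion back to this skb */
    IUCV_SKB_CB(skb)->tag = txmsg.tag;

    if (iucv->transport == AF_IUCV_TRANS_HIPER)     /* assumed condition */
        err = afiucv_hs_send(&txmsg, sk, skb, 0);
    else if (skb->len <= 7)                         /* assumed IPRM size limit */
        err = iucv_send_iprm(iucv->path, &txmsg, skb);
    else
        err = pr_iucv->message_send(iucv->path, &txmsg,
                                    0, 0, skb->data, skb->len);
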