ucmd               34 arch/mips/cavium-octeon/octeon-platform.c 	u32 ucmd;
ucmd               41 arch/mips/cavium-octeon/octeon-platform.c 		ucmd = cvmx_read64_uint32(CVMX_UAHCX_EHCI_USBCMD);
ucmd               42 arch/mips/cavium-octeon/octeon-platform.c 		ucmd &= ~CMD_RUN;
ucmd               43 arch/mips/cavium-octeon/octeon-platform.c 		cvmx_write64_uint32(CVMX_UAHCX_EHCI_USBCMD, ucmd);
ucmd               45 arch/mips/cavium-octeon/octeon-platform.c 		ucmd |= CMD_RESET;
ucmd               46 arch/mips/cavium-octeon/octeon-platform.c 		cvmx_write64_uint32(CVMX_UAHCX_EHCI_USBCMD, ucmd);
ucmd               47 arch/mips/cavium-octeon/octeon-platform.c 		ucmd = cvmx_read64_uint32(CVMX_UAHCX_OHCI_USBCMD);
ucmd               48 arch/mips/cavium-octeon/octeon-platform.c 		ucmd |= CMD_RUN;
ucmd               49 arch/mips/cavium-octeon/octeon-platform.c 		cvmx_write64_uint32(CVMX_UAHCX_OHCI_USBCMD, ucmd);
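
The octeon-platform.c hits above all come from one read-modify-write sequence that halts and resets the EHCI controller and then starts the OHCI controller through the Octeon CSR accessors. A minimal sketch of that sequence follows; CMD_RUN/CMD_RESET are the standard EHCI USBCMD bits, the CVMX_UAHCX_*_USBCMD address macros are assumed to be in scope as in the driver, and the helper name and the 2 ms settle delay are illustrative assumptions rather than the driver's exact code.

    #include <linux/types.h>
    #include <linux/delay.h>
    #include <linux/usb/ehci_def.h>         /* CMD_RUN, CMD_RESET */
    #include <asm/octeon/cvmx.h>
    /* CVMX_UAHCX_{EHCI,OHCI}_USBCMD are CSR address macros defined by the driver. */

    /* Illustrative helper; the real fixup code does this inline. */
    static void demo_octeon_ehci_to_ohci(void)
    {
            u32 ucmd;

            /* Stop the EHCI controller: clear RUN, then request a reset. */
            ucmd = cvmx_read64_uint32(CVMX_UAHCX_EHCI_USBCMD);
            ucmd &= ~CMD_RUN;
            cvmx_write64_uint32(CVMX_UAHCX_EHCI_USBCMD, ucmd);
            mdelay(2);                      /* assumed settle time */
            ucmd |= CMD_RESET;
            cvmx_write64_uint32(CVMX_UAHCX_EHCI_USBCMD, ucmd);

            /* Hand the port over to OHCI by setting its RUN bit. */
            ucmd = cvmx_read64_uint32(CVMX_UAHCX_OHCI_USBCMD);
            ucmd |= CMD_RUN;
            cvmx_write64_uint32(CVMX_UAHCX_OHCI_USBCMD, ucmd);
    }
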
ucmd              664 drivers/block/rsxx/cregs.c 			struct rsxx_reg_access __user *ucmd,
ucmd              670 drivers/block/rsxx/cregs.c 	st = copy_from_user(&cmd, ucmd, sizeof(cmd));
ucmd              681 drivers/block/rsxx/cregs.c 	st = put_user(cmd.stat, &ucmd->stat);
ucmd              686 drivers/block/rsxx/cregs.c 		st = copy_to_user(ucmd->data, cmd.data, cmd.cnt);
ucmd              413 drivers/block/rsxx/rsxx_priv.h 			struct rsxx_reg_access __user *ucmd,
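
In the rsxx lines above ucmd is a __user pointer rather than a kernel-side copy: the ioctl path pulls the whole request in with copy_from_user(), performs the register access, then pushes the status and payload back with put_user() and copy_to_user(). A minimal sketch of that shape, using a hypothetical struct and function name, follows.

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    /* Hypothetical command layout, loosely modelled on rsxx_reg_access. */
    struct demo_reg_access {
            __u32 cnt;
            __s32 stat;
            __u8  data[256];
    };

    static int demo_reg_ioctl(struct demo_reg_access __user *ucmd)
    {
            struct demo_reg_access cmd;

            /* Snapshot the user's request into kernel memory before using it. */
            if (copy_from_user(&cmd, ucmd, sizeof(cmd)))
                    return -EFAULT;

            if (cmd.cnt > sizeof(cmd.data))
                    return -EINVAL;

            /* ... perform the register access, filling cmd.stat / cmd.data ... */

            /* Write the status and payload back to the caller's buffer. */
            if (put_user(cmd.stat, &ucmd->stat))
                    return -EFAULT;
            if (copy_to_user(ucmd->data, cmd.data, cmd.cnt))
                    return -EFAULT;

            return 0;
    }
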
ucmd              998 drivers/infiniband/hw/cxgb4/cq.c 	struct c4iw_create_cq ucmd;
ucmd             1014 drivers/infiniband/hw/cxgb4/cq.c 		if (udata->inlen < sizeof(ucmd))
ucmd              305 drivers/infiniband/hw/hns/hns_roce_cq.c 	struct hns_roce_ib_create_cq ucmd;
ucmd              311 drivers/infiniband/hw/hns/hns_roce_cq.c 	if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd))) {
ucmd              318 drivers/infiniband/hw/hns/hns_roce_cq.c 				      &hr_cq->umem, ucmd.buf_addr,
ucmd              327 drivers/infiniband/hw/hns/hns_roce_cq.c 		ret = hns_roce_db_map_user(context, udata, ucmd.db_addr,
ucmd              329 drivers/infiniband/hw/hns/hns_roce_qp.c 					struct hns_roce_ib_create_qp *ucmd)
ucmd              335 drivers/infiniband/hw/hns/hns_roce_qp.c 	if (ucmd->log_sq_stride > max_sq_stride ||
ucmd              336 drivers/infiniband/hw/hns/hns_roce_qp.c 	    ucmd->log_sq_stride < HNS_ROCE_IB_MIN_SQ_STRIDE) {
ucmd              353 drivers/infiniband/hw/hns/hns_roce_qp.c 				     struct hns_roce_ib_create_qp *ucmd)
ucmd              360 drivers/infiniband/hw/hns/hns_roce_qp.c 	if (check_shl_overflow(1, ucmd->log_sq_bb_count, &hr_qp->sq.wqe_cnt) ||
ucmd              364 drivers/infiniband/hw/hns/hns_roce_qp.c 	ret = check_sq_size_with_integrity(hr_dev, cap, ucmd);
ucmd              370 drivers/infiniband/hw/hns/hns_roce_qp.c 	hr_qp->sq.wqe_shift = ucmd->log_sq_stride;
ucmd              692 drivers/infiniband/hw/hns/hns_roce_qp.c 	struct hns_roce_ib_create_qp ucmd;
ucmd              734 drivers/infiniband/hw/hns/hns_roce_qp.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd))) {
ucmd              741 drivers/infiniband/hw/hns/hns_roce_qp.c 						&ucmd);
ucmd              747 drivers/infiniband/hw/hns/hns_roce_qp.c 		hr_qp->umem = ib_umem_get(udata, ucmd.buf_addr,
ucmd              779 drivers/infiniband/hw/hns/hns_roce_qp.c 		    (udata->inlen >= sizeof(ucmd)) &&
ucmd              782 drivers/infiniband/hw/hns/hns_roce_qp.c 			ret = hns_roce_db_map_user(uctx, udata, ucmd.sdb_addr,
ucmd              797 drivers/infiniband/hw/hns/hns_roce_qp.c 			ret = hns_roce_db_map_user(uctx, udata, ucmd.db_addr,
ucmd              985 drivers/infiniband/hw/hns/hns_roce_qp.c 		    (udata->inlen >= sizeof(ucmd)) &&
ucmd              182 drivers/infiniband/hw/hns/hns_roce_srq.c 	struct hns_roce_ib_create_srq  ucmd;
ucmd              186 drivers/infiniband/hw/hns/hns_roce_srq.c 	if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd)))
ucmd              189 drivers/infiniband/hw/hns/hns_roce_srq.c 	srq->umem = ib_umem_get(udata, ucmd.buf_addr, srq_buf_size, 0, 0);
ucmd              208 drivers/infiniband/hw/hns/hns_roce_srq.c 	srq->idx_que.umem = ib_umem_get(udata, ucmd.que_addr,
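
The hns_roce hits follow the usual RDMA create-verb flow: copy the driver-specific create command out of the ib_udata blob with ib_copy_from_udata(), then pin the buffer it describes with ib_umem_get() and map the doorbell page it names. The sketch below keeps only that skeleton; the helper name is hypothetical, and the five-argument ib_umem_get() call matches the kernel tree these lines come from rather than any later API.

    #include <linux/err.h>
    #include <linux/errno.h>
    #include <rdma/ib_verbs.h>
    #include <rdma/ib_umem.h>
    #include <rdma/hns-abi.h>               /* struct hns_roce_ib_create_cq */

    static int demo_pin_user_cq_buf(struct ib_udata *udata, size_t buf_size,
                                    struct ib_umem **umem)
    {
            struct hns_roce_ib_create_cq ucmd;

            /* Copy the userspace-provided create command into the kernel. */
            if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd)))
                    return -EFAULT;

            /* Pin and map the CQ buffer the application allocated. */
            *umem = ib_umem_get(udata, ucmd.buf_addr, buf_size, 0, 0);
            if (IS_ERR(*umem))
                    return PTR_ERR(*umem);

            /* ... write the buffer's page list to the device, then map
             *     ucmd.db_addr as the user doorbell record ... */
            return 0;
    }
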
ucmd              206 drivers/infiniband/hw/mlx4/cq.c 		struct mlx4_ib_create_cq ucmd;
ucmd              208 drivers/infiniband/hw/mlx4/cq.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof ucmd)) {
ucmd              213 drivers/infiniband/hw/mlx4/cq.c 		buf_addr = (void *)(unsigned long)ucmd.buf_addr;
ucmd              215 drivers/infiniband/hw/mlx4/cq.c 					  ucmd.buf_addr, entries);
ucmd              219 drivers/infiniband/hw/mlx4/cq.c 		err = mlx4_ib_db_map_user(udata, ucmd.db_addr, &cq->db);
ucmd              318 drivers/infiniband/hw/mlx4/cq.c 	struct mlx4_ib_resize_cq ucmd;
ucmd              324 drivers/infiniband/hw/mlx4/cq.c 	if (ib_copy_from_udata(&ucmd, udata, sizeof ucmd))
ucmd              332 drivers/infiniband/hw/mlx4/cq.c 				  &cq->resize_umem, ucmd.buf_addr, entries);
ucmd              439 drivers/infiniband/hw/mlx4/qp.c 			    struct mlx4_ib_create_qp *ucmd)
ucmd              442 drivers/infiniband/hw/mlx4/qp.c 	if ((1 << ucmd->log_sq_bb_count) > dev->dev->caps.max_wqes	 ||
ucmd              443 drivers/infiniband/hw/mlx4/qp.c 	    ucmd->log_sq_stride >
ucmd              445 drivers/infiniband/hw/mlx4/qp.c 	    ucmd->log_sq_stride < MLX4_IB_MIN_SQ_STRIDE)
ucmd              448 drivers/infiniband/hw/mlx4/qp.c 	qp->sq.wqe_cnt   = 1 << ucmd->log_sq_bb_count;
ucmd              449 drivers/infiniband/hw/mlx4/qp.c 	qp->sq.wqe_shift = ucmd->log_sq_stride;
ucmd              541 drivers/infiniband/hw/mlx4/qp.c 		      struct mlx4_ib_create_qp_rss *ucmd)
ucmd              546 drivers/infiniband/hw/mlx4/qp.c 	if ((ucmd->rx_hash_function == MLX4_IB_RX_HASH_FUNC_TOEPLITZ) &&
ucmd              548 drivers/infiniband/hw/mlx4/qp.c 		memcpy(rss_ctx->rss_key, ucmd->rx_hash_key,
ucmd              555 drivers/infiniband/hw/mlx4/qp.c 	if (ucmd->rx_hash_fields_mask & ~(MLX4_IB_RX_HASH_SRC_IPV4	|
ucmd              565 drivers/infiniband/hw/mlx4/qp.c 			 ucmd->rx_hash_fields_mask);
ucmd              569 drivers/infiniband/hw/mlx4/qp.c 	if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_IPV4) &&
ucmd              570 drivers/infiniband/hw/mlx4/qp.c 	    (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_IPV4)) {
ucmd              572 drivers/infiniband/hw/mlx4/qp.c 	} else if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_IPV4) ||
ucmd              573 drivers/infiniband/hw/mlx4/qp.c 		   (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_IPV4)) {
ucmd              578 drivers/infiniband/hw/mlx4/qp.c 	if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_IPV6) &&
ucmd              579 drivers/infiniband/hw/mlx4/qp.c 	    (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_IPV6)) {
ucmd              581 drivers/infiniband/hw/mlx4/qp.c 	} else if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_IPV6) ||
ucmd              582 drivers/infiniband/hw/mlx4/qp.c 		   (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_IPV6)) {
ucmd              587 drivers/infiniband/hw/mlx4/qp.c 	if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_PORT_UDP) &&
ucmd              588 drivers/infiniband/hw/mlx4/qp.c 	    (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_PORT_UDP)) {
ucmd              602 drivers/infiniband/hw/mlx4/qp.c 	} else if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_PORT_UDP) ||
ucmd              603 drivers/infiniband/hw/mlx4/qp.c 		   (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_PORT_UDP)) {
ucmd              608 drivers/infiniband/hw/mlx4/qp.c 	if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_PORT_TCP) &&
ucmd              609 drivers/infiniband/hw/mlx4/qp.c 	    (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_PORT_TCP)) {
ucmd              618 drivers/infiniband/hw/mlx4/qp.c 	} else if ((ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_SRC_PORT_TCP) ||
ucmd              619 drivers/infiniband/hw/mlx4/qp.c 		   (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_DST_PORT_TCP)) {
ucmd              624 drivers/infiniband/hw/mlx4/qp.c 	if (ucmd->rx_hash_fields_mask & MLX4_IB_RX_HASH_INNER) {
ucmd              643 drivers/infiniband/hw/mlx4/qp.c 			 struct mlx4_ib_create_qp_rss *ucmd,
ucmd              681 drivers/infiniband/hw/mlx4/qp.c 	err = set_qp_rss(dev, qp->rss_ctx, init_attr, ucmd);
ucmd              704 drivers/infiniband/hw/mlx4/qp.c 	struct mlx4_ib_create_qp_rss ucmd = {};
ucmd              716 drivers/infiniband/hw/mlx4/qp.c 	required_cmd_sz = offsetof(typeof(ucmd), reserved1) +
ucmd              717 drivers/infiniband/hw/mlx4/qp.c 					sizeof(ucmd.reserved1);
ucmd              723 drivers/infiniband/hw/mlx4/qp.c 	if (ib_copy_from_udata(&ucmd, udata, min(sizeof(ucmd), udata->inlen))) {
ucmd              728 drivers/infiniband/hw/mlx4/qp.c 	if (memchr_inv(ucmd.reserved, 0, sizeof(ucmd.reserved)))
ucmd              731 drivers/infiniband/hw/mlx4/qp.c 	if (ucmd.comp_mask || ucmd.reserved1)
ucmd              734 drivers/infiniband/hw/mlx4/qp.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd              735 drivers/infiniband/hw/mlx4/qp.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd              736 drivers/infiniband/hw/mlx4/qp.c 				 udata->inlen - sizeof(ucmd))) {
ucmd              764 drivers/infiniband/hw/mlx4/qp.c 	err = create_qp_rss(to_mdev(pd->device), init_attr, &ucmd, qp);
ucmd             1077 drivers/infiniband/hw/mlx4/qp.c 		struct mlx4_ib_create_qp ucmd;
ucmd             1084 drivers/infiniband/hw/mlx4/qp.c 		if (ib_copy_from_udata(&ucmd, udata, copy_len)) {
ucmd             1089 drivers/infiniband/hw/mlx4/qp.c 		qp->inl_recv_sz = ucmd.inl_recv_sz;
ucmd             1107 drivers/infiniband/hw/mlx4/qp.c 		qp->sq_no_prefetch = ucmd.sq_no_prefetch;
ucmd             1109 drivers/infiniband/hw/mlx4/qp.c 		err = set_user_sq_size(dev, qp, &ucmd);
ucmd             1114 drivers/infiniband/hw/mlx4/qp.c 			ib_umem_get(udata, ucmd.buf_addr, qp->buf_size, 0, 0);
ucmd             1132 drivers/infiniband/hw/mlx4/qp.c 			err = mlx4_ib_db_map_user(udata, ucmd.db_addr, &qp->db);
ucmd             4136 drivers/infiniband/hw/mlx4/qp.c 	struct mlx4_ib_create_wq ucmd;
ucmd             4142 drivers/infiniband/hw/mlx4/qp.c 	required_cmd_sz = offsetof(typeof(ucmd), comp_mask) +
ucmd             4143 drivers/infiniband/hw/mlx4/qp.c 			  sizeof(ucmd.comp_mask);
ucmd             4149 drivers/infiniband/hw/mlx4/qp.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd             4150 drivers/infiniband/hw/mlx4/qp.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd             4151 drivers/infiniband/hw/mlx4/qp.c 				 udata->inlen - sizeof(ucmd))) {
ucmd             4273 drivers/infiniband/hw/mlx4/qp.c 	struct mlx4_ib_modify_wq ucmd = {};
ucmd             4278 drivers/infiniband/hw/mlx4/qp.c 	required_cmd_sz = offsetof(typeof(ucmd), reserved) +
ucmd             4279 drivers/infiniband/hw/mlx4/qp.c 				   sizeof(ucmd.reserved);
ucmd             4283 drivers/infiniband/hw/mlx4/qp.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd             4284 drivers/infiniband/hw/mlx4/qp.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd             4285 drivers/infiniband/hw/mlx4/qp.c 				 udata->inlen - sizeof(ucmd)))
ucmd             4288 drivers/infiniband/hw/mlx4/qp.c 	if (ib_copy_from_udata(&ucmd, udata, min(sizeof(ucmd), udata->inlen)))
ucmd             4291 drivers/infiniband/hw/mlx4/qp.c 	if (ucmd.comp_mask || ucmd.reserved)
ucmd              108 drivers/infiniband/hw/mlx4/srq.c 		struct mlx4_ib_create_srq ucmd;
ucmd              110 drivers/infiniband/hw/mlx4/srq.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd)))
ucmd              113 drivers/infiniband/hw/mlx4/srq.c 		srq->umem = ib_umem_get(udata, ucmd.buf_addr, buf_size, 0, 0);
ucmd              126 drivers/infiniband/hw/mlx4/srq.c 		err = mlx4_ib_db_map_user(udata, ucmd.db_addr, &srq->db);
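
A pattern that repeats through the mlx4 (and later mlx5) hits is how a variable-length user command is accepted: compute the minimum required size with offsetof() plus sizeof() of the last mandatory field, reject shorter input, require any bytes beyond the kernel's struct to be zero via ib_is_udata_cleared(), and copy only min(sizeof(ucmd), udata->inlen). The sketch below condenses that into one helper; the function name and the choice of comp_mask as the last mandatory field are assumptions for illustration.

    #include <linux/kernel.h>
    #include <linux/errno.h>
    #include <linux/string.h>
    #include <rdma/ib_verbs.h>
    #include <rdma/mlx4-abi.h>              /* struct mlx4_ib_create_wq */

    static int demo_parse_create_wq(struct ib_udata *udata,
                                    struct mlx4_ib_create_wq *ucmd)
    {
            size_t required_cmd_sz = offsetof(typeof(*ucmd), comp_mask) +
                                     sizeof(ucmd->comp_mask);

            /* Older userspace may pass a shorter struct, but never this short. */
            if (udata->inlen < required_cmd_sz)
                    return -EINVAL;

            /*
             * Newer userspace may pass a longer struct, but any tail the
             * kernel does not understand must be zeroed, otherwise it would
             * be silently ignored.
             */
            if (udata->inlen > sizeof(*ucmd) &&
                !ib_is_udata_cleared(udata, sizeof(*ucmd),
                                     udata->inlen - sizeof(*ucmd)))
                    return -EOPNOTSUPP;

            memset(ucmd, 0, sizeof(*ucmd));
            if (ib_copy_from_udata(ucmd, udata, min(sizeof(*ucmd), udata->inlen)))
                    return -EFAULT;

            /* Reserved/unknown bits must also be zero to stay extensible. */
            if (ucmd->comp_mask)
                    return -EOPNOTSUPP;

            return 0;
    }
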
ucmd              708 drivers/infiniband/hw/mlx5/cq.c 	struct mlx5_ib_create_cq ucmd = {};
ucmd              719 drivers/infiniband/hw/mlx5/cq.c 	ucmdlen = udata->inlen < sizeof(ucmd) ?
ucmd              720 drivers/infiniband/hw/mlx5/cq.c 		  (sizeof(ucmd) - sizeof(ucmd.flags)) : sizeof(ucmd);
ucmd              722 drivers/infiniband/hw/mlx5/cq.c 	if (ib_copy_from_udata(&ucmd, udata, ucmdlen))
ucmd              725 drivers/infiniband/hw/mlx5/cq.c 	if (ucmdlen == sizeof(ucmd) &&
ucmd              726 drivers/infiniband/hw/mlx5/cq.c 	    (ucmd.flags & ~(MLX5_IB_CREATE_CQ_FLAGS_CQE_128B_PAD)))
ucmd              729 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.cqe_size != 64 && ucmd.cqe_size != 128)
ucmd              732 drivers/infiniband/hw/mlx5/cq.c 	*cqe_size = ucmd.cqe_size;
ucmd              735 drivers/infiniband/hw/mlx5/cq.c 		ib_umem_get(udata, ucmd.buf_addr, entries * ucmd.cqe_size,
ucmd              742 drivers/infiniband/hw/mlx5/cq.c 	err = mlx5_ib_db_map_user(context, udata, ucmd.db_addr, &cq->db);
ucmd              746 drivers/infiniband/hw/mlx5/cq.c 	mlx5_ib_cont_pages(cq->buf.umem, ucmd.buf_addr, 0, &npages, &page_shift,
ucmd              749 drivers/infiniband/hw/mlx5/cq.c 		    ucmd.buf_addr, entries * ucmd.cqe_size, npages, page_shift, ncont);
ucmd              768 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.cqe_comp_en == 1) {
ucmd              783 drivers/infiniband/hw/mlx5/cq.c 						  ucmd.cqe_comp_res_format);
ucmd              787 drivers/infiniband/hw/mlx5/cq.c 				    ucmd.cqe_comp_res_format, err);
ucmd              795 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.flags & MLX5_IB_CREATE_CQ_FLAGS_CQE_128B_PAD) {
ucmd             1119 drivers/infiniband/hw/mlx5/cq.c 	struct mlx5_ib_resize_cq ucmd;
ucmd             1124 drivers/infiniband/hw/mlx5/cq.c 	err = ib_copy_from_udata(&ucmd, udata, sizeof(ucmd));
ucmd             1128 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.reserved0 || ucmd.reserved1)
ucmd             1132 drivers/infiniband/hw/mlx5/cq.c 	if (ucmd.cqe_size && SIZE_MAX / ucmd.cqe_size <= entries - 1)
ucmd             1135 drivers/infiniband/hw/mlx5/cq.c 	umem = ib_umem_get(udata, ucmd.buf_addr,
ucmd             1136 drivers/infiniband/hw/mlx5/cq.c 			   (size_t)ucmd.cqe_size * entries,
ucmd             1143 drivers/infiniband/hw/mlx5/cq.c 	mlx5_ib_cont_pages(umem, ucmd.buf_addr, 0, &npages, page_shift,
ucmd             1147 drivers/infiniband/hw/mlx5/cq.c 	*cqe_size = ucmd.cqe_size;
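
One detail worth noting in the mlx5 CQ resize lines above: entries and ucmd.cqe_size are both user-controlled, so their product is overflow-checked against SIZE_MAX before it is used as the ib_umem_get() length. A standalone illustration of the same guard, with a hypothetical helper name, follows.

    #include <linux/kernel.h>
    #include <linux/errno.h>

    /*
     * Illustrative check: reject an (entries * cqe_size) product that could
     * exceed SIZE_MAX before it is used as a pinning/allocation length.
     * Both values ultimately come from userspace, so neither is trusted.
     */
    static int demo_check_cq_buf_size(int entries, u32 cqe_size, size_t *buf_size)
    {
            if (!cqe_size || entries <= 0)
                    return -EINVAL;

            /* Division avoids doing the multiplication that might overflow. */
            if ((size_t)SIZE_MAX / cqe_size <= (size_t)entries - 1)
                    return -EINVAL;

            *buf_size = (size_t)cqe_size * entries;
            return 0;
    }
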
ucmd             3413 drivers/infiniband/hw/mlx5/main.c 				  struct mlx5_ib_create_flow *ucmd)
ucmd             3421 drivers/infiniband/hw/mlx5/main.c 	if (ucmd && ucmd->ncounters_data != 0) {
ucmd             3422 drivers/infiniband/hw/mlx5/main.c 		cntrs_data = ucmd->data;
ucmd             3519 drivers/infiniband/hw/mlx5/main.c 						      struct mlx5_ib_create_flow *ucmd)
ucmd             3591 drivers/infiniband/hw/mlx5/main.c 		err = flow_counters_set_data(flow_act.counters, ucmd);
ucmd             3794 drivers/infiniband/hw/mlx5/main.c 	struct mlx5_ib_create_flow *ucmd = NULL, ucmd_hdr;
ucmd             3821 drivers/infiniband/hw/mlx5/main.c 		ucmd = kzalloc(required_ucmd_sz, GFP_KERNEL);
ucmd             3822 drivers/infiniband/hw/mlx5/main.c 		if (!ucmd)
ucmd             3825 drivers/infiniband/hw/mlx5/main.c 		err = ib_copy_from_udata(ucmd, udata, required_ucmd_sz);
ucmd             3891 drivers/infiniband/hw/mlx5/main.c 						    dst, underlay_qpn, ucmd);
ucmd             3912 drivers/infiniband/hw/mlx5/main.c 	kfree(ucmd);
ucmd             3924 drivers/infiniband/hw/mlx5/main.c 	kfree(ucmd);
ucmd             1425 drivers/infiniband/hw/mlx5/mlx5_ib.h 				    struct mlx5_ib_create_qp *ucmd,
ucmd             1432 drivers/infiniband/hw/mlx5/mlx5_ib.h 	    !cqe_version && (ucmd->uidx == MLX5_IB_DEFAULT_UIDX))
ucmd             1439 drivers/infiniband/hw/mlx5/mlx5_ib.h 	return verify_assign_uidx(cqe_version, ucmd->uidx, user_index);
ucmd             1443 drivers/infiniband/hw/mlx5/mlx5_ib.h 				     struct mlx5_ib_create_srq *ucmd,
ucmd             1450 drivers/infiniband/hw/mlx5/mlx5_ib.h 	    !cqe_version && (ucmd->uidx == MLX5_IB_DEFAULT_UIDX))
ucmd             1457 drivers/infiniband/hw/mlx5/mlx5_ib.h 	return verify_assign_uidx(cqe_version, ucmd->uidx, user_index);
ucmd              331 drivers/infiniband/hw/mlx5/qp.c 		       int has_rq, struct mlx5_ib_qp *qp, struct mlx5_ib_create_qp *ucmd)
ucmd              347 drivers/infiniband/hw/mlx5/qp.c 		if (ucmd) {
ucmd              348 drivers/infiniband/hw/mlx5/qp.c 			qp->rq.wqe_cnt = ucmd->rq_wqe_count;
ucmd              349 drivers/infiniband/hw/mlx5/qp.c 			if (ucmd->rq_wqe_shift > BITS_PER_BYTE * sizeof(ucmd->rq_wqe_shift))
ucmd              351 drivers/infiniband/hw/mlx5/qp.c 			qp->rq.wqe_shift = ucmd->rq_wqe_shift;
ucmd              523 drivers/infiniband/hw/mlx5/qp.c 			    struct mlx5_ib_create_qp *ucmd,
ucmd              535 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd->sq_wqe_count && !is_power_of_2(ucmd->sq_wqe_count)) {
ucmd              537 drivers/infiniband/hw/mlx5/qp.c 			     ucmd->sq_wqe_count);
ucmd              541 drivers/infiniband/hw/mlx5/qp.c 	qp->sq.wqe_cnt = ucmd->sq_wqe_count;
ucmd              796 drivers/infiniband/hw/mlx5/qp.c 			  struct mlx5_ib_create_wq *ucmd)
ucmd              806 drivers/infiniband/hw/mlx5/qp.c 	if (!ucmd->buf_addr)
ucmd              809 drivers/infiniband/hw/mlx5/qp.c 	rwq->umem = ib_umem_get(udata, ucmd->buf_addr, rwq->buf_size, 0, 0);
ucmd              816 drivers/infiniband/hw/mlx5/qp.c 	mlx5_ib_cont_pages(rwq->umem, ucmd->buf_addr, 0, &npages, &page_shift,
ucmd              818 drivers/infiniband/hw/mlx5/qp.c 	err = mlx5_ib_get_buf_offset(ucmd->buf_addr, page_shift,
ucmd              828 drivers/infiniband/hw/mlx5/qp.c 	rwq->wq_sig = !!(ucmd->flags & MLX5_WQ_FLAG_SIGNATURE);
ucmd              831 drivers/infiniband/hw/mlx5/qp.c 		    (unsigned long long)ucmd->buf_addr, rwq->buf_size,
ucmd              834 drivers/infiniband/hw/mlx5/qp.c 	err = mlx5_ib_db_map_user(ucontext, udata, ucmd->db_addr, &rwq->db);
ucmd              863 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_create_qp ucmd;
ucmd              876 drivers/infiniband/hw/mlx5/qp.c 	err = ib_copy_from_udata(&ucmd, udata, sizeof(ucmd));
ucmd              884 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.flags & MLX5_QP_FLAG_BFREG_INDEX) {
ucmd              886 drivers/infiniband/hw/mlx5/qp.c 						ucmd.bfreg_index, true);
ucmd              913 drivers/infiniband/hw/mlx5/qp.c 	err = set_user_buf_size(dev, qp, &ucmd, base, attr);
ucmd              917 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.buf_addr && ubuffer->buf_size) {
ucmd              918 drivers/infiniband/hw/mlx5/qp.c 		ubuffer->buf_addr = ucmd.buf_addr;
ucmd              955 drivers/infiniband/hw/mlx5/qp.c 	err = mlx5_ib_db_map_user(context, udata, ucmd.db_addr, &qp->db);
ucmd             1605 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_create_qp_rss ucmd = {};
ucmd             1619 drivers/infiniband/hw/mlx5/qp.c 	required_cmd_sz = offsetof(typeof(ucmd), flags) + sizeof(ucmd.flags);
ucmd             1625 drivers/infiniband/hw/mlx5/qp.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd             1626 drivers/infiniband/hw/mlx5/qp.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd             1627 drivers/infiniband/hw/mlx5/qp.c 				 udata->inlen - sizeof(ucmd))) {
ucmd             1632 drivers/infiniband/hw/mlx5/qp.c 	if (ib_copy_from_udata(&ucmd, udata, min(sizeof(ucmd), udata->inlen))) {
ucmd             1637 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.comp_mask) {
ucmd             1642 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.flags & ~(MLX5_QP_FLAG_TUNNEL_OFFLOADS |
ucmd             1649 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.flags & MLX5_QP_FLAG_TUNNEL_OFFLOADS &&
ucmd             1655 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_INNER &&
ucmd             1656 drivers/infiniband/hw/mlx5/qp.c 	    !(ucmd.flags & MLX5_QP_FLAG_TUNNEL_OFFLOADS)) {
ucmd             1661 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.flags & MLX5_QP_FLAG_TIR_ALLOW_SELF_LB_UC || dev->is_rep) {
ucmd             1666 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.flags & MLX5_QP_FLAG_TIR_ALLOW_SELF_LB_MC) {
ucmd             1694 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.flags & MLX5_QP_FLAG_TUNNEL_OFFLOADS)
ucmd             1699 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_INNER)
ucmd             1704 drivers/infiniband/hw/mlx5/qp.c 	switch (ucmd.rx_hash_function) {
ucmd             1710 drivers/infiniband/hw/mlx5/qp.c 		if (len != ucmd.rx_key_len) {
ucmd             1716 drivers/infiniband/hw/mlx5/qp.c 		memcpy(rss_key, ucmd.rx_hash_key, len);
ucmd             1724 drivers/infiniband/hw/mlx5/qp.c 	if (!ucmd.rx_hash_fields_mask) {
ucmd             1732 drivers/infiniband/hw/mlx5/qp.c 	if (((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_IPV4) ||
ucmd             1733 drivers/infiniband/hw/mlx5/qp.c 	     (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_IPV4)) &&
ucmd             1734 drivers/infiniband/hw/mlx5/qp.c 	     ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_IPV6) ||
ucmd             1735 drivers/infiniband/hw/mlx5/qp.c 	     (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_IPV6))) {
ucmd             1741 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_IPV4) ||
ucmd             1742 drivers/infiniband/hw/mlx5/qp.c 	    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_IPV4))
ucmd             1745 drivers/infiniband/hw/mlx5/qp.c 	else if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_IPV6) ||
ucmd             1746 drivers/infiniband/hw/mlx5/qp.c 		 (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_IPV6))
ucmd             1750 drivers/infiniband/hw/mlx5/qp.c 	outer_l4 = ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_PORT_TCP) ||
ucmd             1751 drivers/infiniband/hw/mlx5/qp.c 		    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_PORT_TCP)) << 0 |
ucmd             1752 drivers/infiniband/hw/mlx5/qp.c 		   ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_PORT_UDP) ||
ucmd             1753 drivers/infiniband/hw/mlx5/qp.c 		    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_PORT_UDP)) << 1 |
ucmd             1754 drivers/infiniband/hw/mlx5/qp.c 		   (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_IPSEC_SPI) << 2;
ucmd             1763 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_PORT_TCP) ||
ucmd             1764 drivers/infiniband/hw/mlx5/qp.c 	    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_PORT_TCP))
ucmd             1767 drivers/infiniband/hw/mlx5/qp.c 	else if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_PORT_UDP) ||
ucmd             1768 drivers/infiniband/hw/mlx5/qp.c 		 (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_PORT_UDP))
ucmd             1772 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_IPV4) ||
ucmd             1773 drivers/infiniband/hw/mlx5/qp.c 	    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_IPV6))
ucmd             1776 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_IPV4) ||
ucmd             1777 drivers/infiniband/hw/mlx5/qp.c 	    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_IPV6))
ucmd             1780 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_PORT_TCP) ||
ucmd             1781 drivers/infiniband/hw/mlx5/qp.c 	    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_SRC_PORT_UDP))
ucmd             1784 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_PORT_TCP) ||
ucmd             1785 drivers/infiniband/hw/mlx5/qp.c 	    (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_DST_PORT_UDP))
ucmd             1788 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.rx_hash_fields_mask & MLX5_RX_HASH_IPSEC_SPI)
ucmd             1866 drivers/infiniband/hw/mlx5/qp.c 					 struct mlx5_ib_create_qp *ucmd,
ucmd             1876 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd)
ucmd             1877 drivers/infiniband/hw/mlx5/qp.c 		allow_scat_cqe = ucmd->flags & MLX5_QP_FLAG_ALLOW_SCATTER_CQE;
ucmd             1959 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_create_qp ucmd;
ucmd             2039 drivers/infiniband/hw/mlx5/qp.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd))) {
ucmd             2044 drivers/infiniband/hw/mlx5/qp.c 		if (!check_flags_mask(ucmd.flags,
ucmd             2057 drivers/infiniband/hw/mlx5/qp.c 		err = get_qp_user_index(ucontext, &ucmd, udata->inlen, &uidx);
ucmd             2061 drivers/infiniband/hw/mlx5/qp.c 		qp->wq_sig = !!(ucmd.flags & MLX5_QP_FLAG_SIGNATURE);
ucmd             2063 drivers/infiniband/hw/mlx5/qp.c 			qp->scat_cqe = !!(ucmd.flags & MLX5_QP_FLAG_SCATTER_CQE);
ucmd             2064 drivers/infiniband/hw/mlx5/qp.c 		if (ucmd.flags & MLX5_QP_FLAG_TUNNEL_OFFLOADS) {
ucmd             2073 drivers/infiniband/hw/mlx5/qp.c 		if (ucmd.flags & MLX5_QP_FLAG_TIR_ALLOW_SELF_LB_UC) {
ucmd             2081 drivers/infiniband/hw/mlx5/qp.c 		if (ucmd.flags & MLX5_QP_FLAG_TIR_ALLOW_SELF_LB_MC) {
ucmd             2089 drivers/infiniband/hw/mlx5/qp.c 		if (ucmd.flags & MLX5_QP_FLAG_PACKET_BASED_CREDIT_MODE) {
ucmd             2121 drivers/infiniband/hw/mlx5/qp.c 			  qp, udata ? &ucmd : NULL);
ucmd             2131 drivers/infiniband/hw/mlx5/qp.c 			mlx5_ib_dbg(dev, "requested sq_wqe_count (%d)\n", ucmd.sq_wqe_count);
ucmd             2132 drivers/infiniband/hw/mlx5/qp.c 			if (ucmd.rq_wqe_shift != qp->rq.wqe_shift ||
ucmd             2133 drivers/infiniband/hw/mlx5/qp.c 			    ucmd.rq_wqe_count != qp->rq.wqe_cnt) {
ucmd             2137 drivers/infiniband/hw/mlx5/qp.c 			if (ucmd.sq_wqe_count > max_wqes) {
ucmd             2139 drivers/infiniband/hw/mlx5/qp.c 					    ucmd.sq_wqe_count, max_wqes);
ucmd             2199 drivers/infiniband/hw/mlx5/qp.c 					     udata ? &ucmd : NULL,
ucmd             2282 drivers/infiniband/hw/mlx5/qp.c 		qp->raw_packet_qp.sq.ubuffer.buf_addr = ucmd.sq_buf_addr;
ucmd             2542 drivers/infiniband/hw/mlx5/qp.c 					struct mlx5_ib_create_qp *ucmd,
ucmd             2555 drivers/infiniband/hw/mlx5/qp.c 	err = get_qp_user_index(ucontext, ucmd, sizeof(*ucmd), &uidx);
ucmd             2575 drivers/infiniband/hw/mlx5/qp.c 	MLX5_SET64(dctc, dctc, dc_access_key, ucmd->access_key);
ucmd             2578 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd->flags & MLX5_QP_FLAG_SCATTER_CQE)
ucmd             2591 drivers/infiniband/hw/mlx5/qp.c 			   struct mlx5_ib_create_qp *ucmd,
ucmd             2600 drivers/infiniband/hw/mlx5/qp.c 	if (udata->inlen < sizeof(*ucmd)) {
ucmd             2604 drivers/infiniband/hw/mlx5/qp.c 	err = ib_copy_from_udata(ucmd, udata, sizeof(*ucmd));
ucmd             2608 drivers/infiniband/hw/mlx5/qp.c 	if ((ucmd->flags & MLX_QP_FLAGS) == MLX5_QP_FLAG_TYPE_DCI) {
ucmd             2611 drivers/infiniband/hw/mlx5/qp.c 		if ((ucmd->flags & MLX_QP_FLAGS) == MLX5_QP_FLAG_TYPE_DCT) {
ucmd             2664 drivers/infiniband/hw/mlx5/qp.c 		struct mlx5_ib_create_qp ucmd;
ucmd             2668 drivers/infiniband/hw/mlx5/qp.c 		err = set_mlx_qp_type(dev, init_attr, &ucmd, udata);
ucmd             2679 drivers/infiniband/hw/mlx5/qp.c 			return mlx5_ib_create_dct(pd, init_attr, &ucmd, udata);
ucmd             3412 drivers/infiniband/hw/mlx5/qp.c 			       const struct mlx5_ib_modify_qp *ucmd,
ucmd             3665 drivers/infiniband/hw/mlx5/qp.c 			if (ucmd->burst_info.max_burst_sz) {
ucmd             3669 drivers/infiniband/hw/mlx5/qp.c 						ucmd->burst_info.max_burst_sz;
ucmd             3676 drivers/infiniband/hw/mlx5/qp.c 			if (ucmd->burst_info.typical_pkt_sz) {
ucmd             3680 drivers/infiniband/hw/mlx5/qp.c 						ucmd->burst_info.typical_pkt_sz;
ucmd             3894 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_modify_qp ucmd = {};
ucmd             3905 drivers/infiniband/hw/mlx5/qp.c 		required_cmd_sz = offsetof(typeof(ucmd), reserved) +
ucmd             3906 drivers/infiniband/hw/mlx5/qp.c 			sizeof(ucmd.reserved);
ucmd             3910 drivers/infiniband/hw/mlx5/qp.c 		if (udata->inlen > sizeof(ucmd) &&
ucmd             3911 drivers/infiniband/hw/mlx5/qp.c 		    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd             3912 drivers/infiniband/hw/mlx5/qp.c 					 udata->inlen - sizeof(ucmd)))
ucmd             3915 drivers/infiniband/hw/mlx5/qp.c 		if (ib_copy_from_udata(&ucmd, udata,
ucmd             3916 drivers/infiniband/hw/mlx5/qp.c 				       min(udata->inlen, sizeof(ucmd))))
ucmd             3919 drivers/infiniband/hw/mlx5/qp.c 		if (ucmd.comp_mask ||
ucmd             3920 drivers/infiniband/hw/mlx5/qp.c 		    memchr_inv(&ucmd.reserved, 0, sizeof(ucmd.reserved)) ||
ucmd             3921 drivers/infiniband/hw/mlx5/qp.c 		    memchr_inv(&ucmd.burst_info.reserved, 0,
ucmd             3922 drivers/infiniband/hw/mlx5/qp.c 			       sizeof(ucmd.burst_info.reserved)))
ucmd             4007 drivers/infiniband/hw/mlx5/qp.c 				  new_state, &ucmd, udata);
ucmd             6020 drivers/infiniband/hw/mlx5/qp.c 			    struct mlx5_ib_create_wq *ucmd,
ucmd             6027 drivers/infiniband/hw/mlx5/qp.c 	if (!ucmd->rq_wqe_count)
ucmd             6030 drivers/infiniband/hw/mlx5/qp.c 	rwq->wqe_count = ucmd->rq_wqe_count;
ucmd             6031 drivers/infiniband/hw/mlx5/qp.c 	rwq->wqe_shift = ucmd->rq_wqe_shift;
ucmd             6046 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_create_wq ucmd = {};
ucmd             6050 drivers/infiniband/hw/mlx5/qp.c 	required_cmd_sz = offsetof(typeof(ucmd), single_stride_log_num_of_bytes)
ucmd             6051 drivers/infiniband/hw/mlx5/qp.c 		+ sizeof(ucmd.single_stride_log_num_of_bytes);
ucmd             6057 drivers/infiniband/hw/mlx5/qp.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd             6058 drivers/infiniband/hw/mlx5/qp.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd             6059 drivers/infiniband/hw/mlx5/qp.c 				 udata->inlen - sizeof(ucmd))) {
ucmd             6064 drivers/infiniband/hw/mlx5/qp.c 	if (ib_copy_from_udata(&ucmd, udata, min(sizeof(ucmd), udata->inlen))) {
ucmd             6069 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.comp_mask & (~MLX5_IB_CREATE_WQ_STRIDING_RQ)) {
ucmd             6072 drivers/infiniband/hw/mlx5/qp.c 	} else if (ucmd.comp_mask & MLX5_IB_CREATE_WQ_STRIDING_RQ) {
ucmd             6077 drivers/infiniband/hw/mlx5/qp.c 		if ((ucmd.single_stride_log_num_of_bytes <
ucmd             6079 drivers/infiniband/hw/mlx5/qp.c 		    (ucmd.single_stride_log_num_of_bytes >
ucmd             6082 drivers/infiniband/hw/mlx5/qp.c 				    ucmd.single_stride_log_num_of_bytes,
ucmd             6087 drivers/infiniband/hw/mlx5/qp.c 		if ((ucmd.single_wqe_log_num_of_strides >
ucmd             6089 drivers/infiniband/hw/mlx5/qp.c 		     (ucmd.single_wqe_log_num_of_strides <
ucmd             6092 drivers/infiniband/hw/mlx5/qp.c 				    ucmd.single_wqe_log_num_of_strides,
ucmd             6098 drivers/infiniband/hw/mlx5/qp.c 			ucmd.single_stride_log_num_of_bytes;
ucmd             6099 drivers/infiniband/hw/mlx5/qp.c 		rwq->log_num_strides = ucmd.single_wqe_log_num_of_strides;
ucmd             6100 drivers/infiniband/hw/mlx5/qp.c 		rwq->two_byte_shift_en = !!ucmd.two_byte_shift_en;
ucmd             6104 drivers/infiniband/hw/mlx5/qp.c 	err = set_user_rq_size(dev, init_attr, &ucmd, rwq);
ucmd             6110 drivers/infiniband/hw/mlx5/qp.c 	err = create_user_rq(dev, pd, udata, rwq, &ucmd);
ucmd             6116 drivers/infiniband/hw/mlx5/qp.c 	rwq->user_index = ucmd.user_index;
ucmd             6287 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_ib_modify_wq ucmd = {};
ucmd             6296 drivers/infiniband/hw/mlx5/qp.c 	required_cmd_sz = offsetof(typeof(ucmd), reserved) + sizeof(ucmd.reserved);
ucmd             6300 drivers/infiniband/hw/mlx5/qp.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd             6301 drivers/infiniband/hw/mlx5/qp.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd             6302 drivers/infiniband/hw/mlx5/qp.c 				 udata->inlen - sizeof(ucmd)))
ucmd             6305 drivers/infiniband/hw/mlx5/qp.c 	if (ib_copy_from_udata(&ucmd, udata, min(sizeof(ucmd), udata->inlen)))
ucmd             6308 drivers/infiniband/hw/mlx5/qp.c 	if (ucmd.comp_mask || ucmd.reserved)
ucmd               49 drivers/infiniband/hw/mlx5/srq.c 	struct mlx5_ib_create_srq ucmd = {};
ucmd               60 drivers/infiniband/hw/mlx5/srq.c 	ucmdlen = min(udata->inlen, sizeof(ucmd));
ucmd               62 drivers/infiniband/hw/mlx5/srq.c 	if (ib_copy_from_udata(&ucmd, udata, ucmdlen)) {
ucmd               67 drivers/infiniband/hw/mlx5/srq.c 	if (ucmd.reserved0 || ucmd.reserved1)
ucmd               70 drivers/infiniband/hw/mlx5/srq.c 	if (udata->inlen > sizeof(ucmd) &&
ucmd               71 drivers/infiniband/hw/mlx5/srq.c 	    !ib_is_udata_cleared(udata, sizeof(ucmd),
ucmd               72 drivers/infiniband/hw/mlx5/srq.c 				 udata->inlen - sizeof(ucmd)))
ucmd               76 drivers/infiniband/hw/mlx5/srq.c 		err = get_srq_user_index(ucontext, &ucmd, udata->inlen, &uidx);
ucmd               81 drivers/infiniband/hw/mlx5/srq.c 	srq->wq_sig = !!(ucmd.flags & MLX5_SRQ_FLAG_SIGNATURE);
ucmd               83 drivers/infiniband/hw/mlx5/srq.c 	srq->umem = ib_umem_get(udata, ucmd.buf_addr, buf_size, 0, 0);
ucmd               90 drivers/infiniband/hw/mlx5/srq.c 	mlx5_ib_cont_pages(srq->umem, ucmd.buf_addr, 0, &npages,
ucmd               92 drivers/infiniband/hw/mlx5/srq.c 	err = mlx5_ib_get_buf_offset(ucmd.buf_addr, page_shift,
ucmd              107 drivers/infiniband/hw/mlx5/srq.c 	err = mlx5_ib_db_map_user(ucontext, udata, ucmd.db_addr, &srq->db);
ucmd              410 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_create_srq ucmd;
ucmd              420 drivers/infiniband/hw/mthca/mthca_provider.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd)))
ucmd              424 drivers/infiniband/hw/mthca/mthca_provider.c 					context->db_tab, ucmd.db_index,
ucmd              425 drivers/infiniband/hw/mthca/mthca_provider.c 					ucmd.db_page);
ucmd              430 drivers/infiniband/hw/mthca/mthca_provider.c 		srq->mr.ibmr.lkey = ucmd.lkey;
ucmd              431 drivers/infiniband/hw/mthca/mthca_provider.c 		srq->db_index     = ucmd.db_index;
ucmd              439 drivers/infiniband/hw/mthca/mthca_provider.c 				    context->db_tab, ucmd.db_index);
ucmd              474 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_create_qp ucmd;
ucmd              491 drivers/infiniband/hw/mthca/mthca_provider.c 			if (ib_copy_from_udata(&ucmd, udata, sizeof ucmd)) {
ucmd              498 drivers/infiniband/hw/mthca/mthca_provider.c 						ucmd.sq_db_index, ucmd.sq_db_page);
ucmd              506 drivers/infiniband/hw/mthca/mthca_provider.c 						ucmd.rq_db_index, ucmd.rq_db_page);
ucmd              511 drivers/infiniband/hw/mthca/mthca_provider.c 						    ucmd.sq_db_index);
ucmd              516 drivers/infiniband/hw/mthca/mthca_provider.c 			qp->mr.ibmr.lkey = ucmd.lkey;
ucmd              517 drivers/infiniband/hw/mthca/mthca_provider.c 			qp->sq.db_index  = ucmd.sq_db_index;
ucmd              518 drivers/infiniband/hw/mthca/mthca_provider.c 			qp->rq.db_index  = ucmd.rq_db_index;
ucmd              531 drivers/infiniband/hw/mthca/mthca_provider.c 					    ucmd.sq_db_index);
ucmd              535 drivers/infiniband/hw/mthca/mthca_provider.c 					    ucmd.rq_db_index);
ucmd              610 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_create_cq ucmd;
ucmd              624 drivers/infiniband/hw/mthca/mthca_provider.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd)))
ucmd              628 drivers/infiniband/hw/mthca/mthca_provider.c 					context->db_tab, ucmd.set_db_index,
ucmd              629 drivers/infiniband/hw/mthca/mthca_provider.c 					ucmd.set_db_page);
ucmd              634 drivers/infiniband/hw/mthca/mthca_provider.c 					context->db_tab, ucmd.arm_db_index,
ucmd              635 drivers/infiniband/hw/mthca/mthca_provider.c 					ucmd.arm_db_page);
ucmd              643 drivers/infiniband/hw/mthca/mthca_provider.c 		cq->buf.mr.ibmr.lkey = ucmd.lkey;
ucmd              644 drivers/infiniband/hw/mthca/mthca_provider.c 		cq->set_ci_db_index  = ucmd.set_db_index;
ucmd              645 drivers/infiniband/hw/mthca/mthca_provider.c 		cq->arm_db_index     = ucmd.arm_db_index;
ucmd              652 drivers/infiniband/hw/mthca/mthca_provider.c 			    udata ? ucmd.pdn : to_mdev(ibdev)->driver_pd.pd_num,
ucmd              670 drivers/infiniband/hw/mthca/mthca_provider.c 				    context->db_tab, ucmd.arm_db_index);
ucmd              675 drivers/infiniband/hw/mthca/mthca_provider.c 				    context->db_tab, ucmd.set_db_index);
ucmd              729 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_resize_cq ucmd;
ucmd              750 drivers/infiniband/hw/mthca/mthca_provider.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof ucmd)) {
ucmd              754 drivers/infiniband/hw/mthca/mthca_provider.c 		lkey = ucmd.lkey;
ucmd              862 drivers/infiniband/hw/mthca/mthca_provider.c 	struct mthca_reg_mr ucmd;
ucmd              868 drivers/infiniband/hw/mthca/mthca_provider.c 	if (udata->inlen < sizeof ucmd) {
ucmd              875 drivers/infiniband/hw/mthca/mthca_provider.c 		ucmd.mr_attrs = 0;
ucmd              876 drivers/infiniband/hw/mthca/mthca_provider.c 	} else if (ib_copy_from_udata(&ucmd, udata, sizeof ucmd))
ucmd              884 drivers/infiniband/hw/mthca/mthca_provider.c 			       ucmd.mr_attrs & MTHCA_MR_DMASYNC);
ucmd             2036 drivers/infiniband/hw/qib/qib_file_ops.c 	const struct qib_cmd __user *ucmd;
ucmd             2055 drivers/infiniband/hw/qib/qib_file_ops.c 	ucmd = (const struct qib_cmd __user *) data;
ucmd             2057 drivers/infiniband/hw/qib/qib_file_ops.c 	if (copy_from_user(&cmd.type, &ucmd->type, sizeof(cmd.type))) {
ucmd             2069 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.user_info;
ucmd             2075 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.recv_ctrl;
ucmd             2081 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.ctxt_info;
ucmd             2088 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.tid_info;
ucmd             2094 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.part_key;
ucmd             2107 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.poll_type;
ucmd             2113 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.armlaunch_ctrl;
ucmd             2119 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.sdma_inflight;
ucmd             2125 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.sdma_complete;
ucmd             2131 drivers/infiniband/hw/qib/qib_file_ops.c 		src = &ucmd->cmd.event_mask;
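
The qib_file_ops.c hits show a different style of user command: a write()-based interface where the kernel first copies only the type field, then switches on it to pick which union member to copy and how many bytes to expect. A compressed sketch of that two-stage copy is below; the struct layout, command values, and function name are hypothetical stand-ins for struct qib_cmd.

    #include <linux/errno.h>
    #include <linux/types.h>
    #include <linux/uaccess.h>

    /* Hypothetical tagged command, shaped like struct qib_cmd. */
    struct demo_cmd {
            __u32 type;
            union {
                    __u64 tid_info;
                    __u32 recv_ctrl;
                    __u16 part_key;
            } cmd;
    };

    #define DEMO_CMD_RECV_CTRL      1
    #define DEMO_CMD_PART_KEY       2

    static ssize_t demo_write(const char __user *data, size_t count)
    {
            const struct demo_cmd __user *ucmd = (const struct demo_cmd __user *)data;
            const void __user *src;
            struct demo_cmd cmd;
            size_t consumed, copy;

            if (count < sizeof(cmd.type))
                    return -EINVAL;

            /* Stage 1: fetch just the discriminant. */
            if (copy_from_user(&cmd.type, &ucmd->type, sizeof(cmd.type)))
                    return -EFAULT;
            consumed = sizeof(cmd.type);

            /* Stage 2: pick the union member and size implied by the type. */
            switch (cmd.type) {
            case DEMO_CMD_RECV_CTRL:
                    copy = sizeof(cmd.cmd.recv_ctrl);
                    src = &ucmd->cmd.recv_ctrl;
                    break;
            case DEMO_CMD_PART_KEY:
                    copy = sizeof(cmd.cmd.part_key);
                    src = &ucmd->cmd.part_key;
                    break;
            default:
                    return -EINVAL;
            }

            if (count < consumed + copy)
                    return -EINVAL;
            if (copy_from_user(&cmd.cmd, src, copy))
                    return -EFAULT;

            /* ... dispatch on cmd.type ... */
            return consumed + copy;
    }
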
ucmd              116 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c 	struct pvrdma_create_cq ucmd;
ucmd              133 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c 		if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd))) {
ucmd              138 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c 		cq->umem = ib_umem_get(udata, ucmd.buf_addr, ucmd.buf_size,
ucmd              198 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 	struct pvrdma_create_qp ucmd;
ucmd              258 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 			if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd))) {
ucmd              265 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 				qp->rumem = ib_umem_get(udata, ucmd.rbuf_addr,
ucmd              266 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 							ucmd.rbuf_size, 0, 0);
ucmd              277 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 			qp->sumem = ib_umem_get(udata, ucmd.sbuf_addr,
ucmd              278 drivers/infiniband/hw/vmw_pvrdma/pvrdma_qp.c 						ucmd.sbuf_size, 0, 0);
ucmd              109 drivers/infiniband/hw/vmw_pvrdma/pvrdma_srq.c 	struct pvrdma_create_srq ucmd;
ucmd              144 drivers/infiniband/hw/vmw_pvrdma/pvrdma_srq.c 	if (ib_copy_from_udata(&ucmd, udata, sizeof(ucmd))) {
ucmd              149 drivers/infiniband/hw/vmw_pvrdma/pvrdma_srq.c 	srq->umem = ib_umem_get(udata, ucmd.buf_addr, ucmd.buf_size, 0, 0);
ucmd              231 drivers/infiniband/sw/rxe/rxe_loc.h 		      struct rxe_modify_srq_cmd *ucmd, struct ib_udata *udata);
ucmd              151 drivers/infiniband/sw/rxe/rxe_srq.c 		      struct rxe_modify_srq_cmd *ucmd, struct ib_udata *udata)
ucmd              162 drivers/infiniband/sw/rxe/rxe_srq.c 		mi = u64_to_user_ptr(ucmd->mmap_info_addr);
ucmd              338 drivers/infiniband/sw/rxe/rxe_verbs.c 	struct rxe_modify_srq_cmd ucmd = {};
ucmd              341 drivers/infiniband/sw/rxe/rxe_verbs.c 		if (udata->inlen < sizeof(ucmd))
ucmd              344 drivers/infiniband/sw/rxe/rxe_verbs.c 		err = ib_copy_from_udata(&ucmd, udata, sizeof(ucmd));
ucmd              353 drivers/infiniband/sw/rxe/rxe_verbs.c 	err = rxe_srq_from_attr(rxe, srq, attr, mask, &ucmd, udata);
ucmd             1401 drivers/nvme/host/core.c 			struct nvme_passthru_cmd __user *ucmd)
ucmd             1412 drivers/nvme/host/core.c 	if (copy_from_user(&cmd, ucmd, sizeof(cmd)))
ucmd             1441 drivers/nvme/host/core.c 		if (put_user(result, &ucmd->result))
ucmd             1449 drivers/nvme/host/core.c 			struct nvme_passthru_cmd64 __user *ucmd)
ucmd             1459 drivers/nvme/host/core.c 	if (copy_from_user(&cmd, ucmd, sizeof(cmd)))
ucmd             1488 drivers/nvme/host/core.c 		if (put_user(cmd.result, &ucmd->result))
ucmd              129 drivers/scsi/sym53c8xx_2/sym_glue.c 	struct sym_ucmd *ucmd = SYM_UCMD_PTR(cmd);
ucmd              132 drivers/scsi/sym53c8xx_2/sym_glue.c 	if (ucmd->eh_done)
ucmd              133 drivers/scsi/sym53c8xx_2/sym_glue.c 		complete(ucmd->eh_done);
ucmd              580 drivers/scsi/sym53c8xx_2/sym_glue.c 	struct sym_ucmd *ucmd = SYM_UCMD_PTR(cmd);
ucmd              660 drivers/scsi/sym53c8xx_2/sym_glue.c 		ucmd->eh_done = &eh_done;
ucmd              663 drivers/scsi/sym53c8xx_2/sym_glue.c 			ucmd->eh_done = NULL;
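
The sym53c8xx_2 lines use ucmd for the per-command private area (SYM_UCMD_PTR(cmd)) and show an error-handling handshake: the eh path parks a struct completion in ucmd->eh_done and waits, while the normal completion path signals it if set. A hedged sketch of that handshake with generic names follows; locking against the completion path is omitted.

    #include <linux/completion.h>
    #include <linux/errno.h>
    #include <linux/jiffies.h>

    /* Hypothetical per-command private data, like struct sym_ucmd. */
    struct demo_ucmd {
            struct completion *eh_done;
    };

    /* Called from the normal command-completion path. */
    static void demo_complete_cmd(struct demo_ucmd *ucmd)
    {
            /* If an error handler is waiting on this command, wake it. */
            if (ucmd->eh_done)
                    complete(ucmd->eh_done);
    }

    /* Called from the SCSI error handler while it waits for the command. */
    static int demo_eh_wait(struct demo_ucmd *ucmd)
    {
            DECLARE_COMPLETION_ONSTACK(eh_done);
            int ret = 0;

            ucmd->eh_done = &eh_done;

            /* ... issue the abort/reset to the hardware here ... */

            if (!wait_for_completion_timeout(&eh_done, 5 * HZ))
                    ret = -ETIMEDOUT;

            ucmd->eh_done = NULL;
            return ret;
    }
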