qps                44 drivers/gpu/drm/amd/display/dc/dsc/rc_calc.c void get_qp_set(qp_set qps, enum colour_mode cm, enum bits_per_comp bpc, enum max_min max_min, float bpp)
qps                86 drivers/gpu/drm/amd/display/dc/dsc/rc_calc.c 	memcpy(qps, table[index].qps, sizeof(qp_set));
qps                76 drivers/gpu/drm/amd/display/dc/dsc/rc_calc.h 	const qp_set  qps;
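The DSC rate-control hits above treat qps as an output array: get_qp_set() selects a row of a QP table keyed on colour mode, bit depth and bpp, then memcpy()s that row's qps field into the caller's buffer. A minimal sketch of that table-copy shape; the entry layout, the key field and the 15-element size are assumptions, only the .qps member and the memcpy() are taken from the lines above.

#include <linux/string.h>

typedef int qp_set[15];			/* assumed element count */

struct demo_qp_entry {
	float		bpp;		/* hypothetical lookup key */
	const qp_set	qps;		/* per-entry QP values, as in rc_calc.h:76 */
};

static void demo_copy_qp_set(qp_set out, const struct demo_qp_entry *table,
			     int index)
{
	/* rc_calc.c:86 copies the whole selected row into the caller's array */
	memcpy(out, table[index].qps, sizeof(qp_set));
}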
qps                70 drivers/infiniband/hw/cxgb3/iwch.c 	xa_lock_irq(&rnicp->qps);
qps                71 drivers/infiniband/hw/cxgb3/iwch.c 	xa_for_each(&rnicp->qps, index, qhp)
qps                73 drivers/infiniband/hw/cxgb3/iwch.c 	xa_unlock_irq(&rnicp->qps);
qps                81 drivers/infiniband/hw/cxgb3/iwch.c 	xa_lock_irq(&rnicp->qps);
qps                82 drivers/infiniband/hw/cxgb3/iwch.c 	xa_for_each(&rnicp->qps, index, qhp) {
qps                88 drivers/infiniband/hw/cxgb3/iwch.c 	xa_unlock_irq(&rnicp->qps);
qps               102 drivers/infiniband/hw/cxgb3/iwch.c 	xa_init_flags(&rnicp->qps, XA_FLAGS_LOCK_IRQ);
qps               186 drivers/infiniband/hw/cxgb3/iwch.c 			WARN_ON(!xa_empty(&dev->qps));
qps               110 drivers/infiniband/hw/cxgb3/iwch.h 	struct xarray qps;
qps               143 drivers/infiniband/hw/cxgb3/iwch.h 	return xa_load(&rhp->qps, qpid);
qps                51 drivers/infiniband/hw/cxgb3/iwch_ev.c 	xa_lock(&rnicp->qps);
qps                52 drivers/infiniband/hw/cxgb3/iwch_ev.c 	qhp = xa_load(&rnicp->qps, CQE_QPID(rsp_msg->cqe));
qps                58 drivers/infiniband/hw/cxgb3/iwch_ev.c 		xa_unlock(&rnicp->qps);
qps                68 drivers/infiniband/hw/cxgb3/iwch_ev.c 		xa_unlock(&rnicp->qps);
qps                79 drivers/infiniband/hw/cxgb3/iwch_ev.c 	xa_unlock(&rnicp->qps);
qps               117 drivers/infiniband/hw/cxgb3/iwch_ev.c 	xa_lock(&rnicp->qps);
qps               119 drivers/infiniband/hw/cxgb3/iwch_ev.c 	qhp = xa_load(&rnicp->qps, CQE_QPID(rsp_msg->cqe));
qps               126 drivers/infiniband/hw/cxgb3/iwch_ev.c 		xa_unlock(&rnicp->qps);
qps               131 drivers/infiniband/hw/cxgb3/iwch_ev.c 	xa_unlock(&rnicp->qps);
qps               679 drivers/infiniband/hw/cxgb3/iwch_provider.c 	xa_erase_irq(&rhp->qps, qhp->wq.qpid);
qps               795 drivers/infiniband/hw/cxgb3/iwch_provider.c 	if (xa_store_irq(&rhp->qps, qhp->wq.qpid, qhp, GFP_KERNEL)) {
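The cxgb3 hits form one lifecycle: the qps xarray is initialised with XA_FLAGS_LOCK_IRQ at device init, a QP is stored under its hardware QPID at create time, looked up from the event path, and erased at destroy. A minimal sketch of that lifecycle, with the iwch structures reduced to the fields these call sites actually use.

#include <linux/xarray.h>

struct demo_dev {				/* stand-in for iwch_dev / c4iw_dev */
	struct xarray qps;			/* QPID -> QP pointer */
};

struct demo_qp {
	u32 qpid;
};

static void demo_dev_init(struct demo_dev *dev)
{
	xa_init_flags(&dev->qps, XA_FLAGS_LOCK_IRQ);	/* iwch.c:102 */
}

static int demo_qp_create(struct demo_dev *dev, struct demo_qp *qp)
{
	/* iwch_provider.c:795 uses xa_store_irq(); xa_insert_irq() (cxgb4,
	 * qp.c:2220) would additionally reject a duplicate QPID. */
	return xa_err(xa_store_irq(&dev->qps, qp->qpid, qp, GFP_KERNEL));
}

static struct demo_qp *demo_qp_lookup(struct demo_dev *dev, u32 qpid)
{
	return xa_load(&dev->qps, qpid);		/* iwch.h:143 */
}

static void demo_qp_destroy(struct demo_dev *dev, struct demo_qp *qp)
{
	xa_erase_irq(&dev->qps, qp->qpid);		/* iwch_provider.c:679 */
}

Note how the event handlers (iwch_ev.c:51-79) hold xa_lock() across the xa_load() and the code that follows it, so a QP cannot be erased while the handler is still inspecting it.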
qps               344 drivers/infiniband/hw/cxgb4/device.c 	xa_for_each(&qpd->devp->qps, index, qp)
qps               354 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&qpd->devp->qps);
qps               355 drivers/infiniband/hw/cxgb4/device.c 	xa_for_each(&qpd->devp->qps, index, qp)
qps               357 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&qpd->devp->qps);
qps               940 drivers/infiniband/hw/cxgb4/device.c 	WARN_ON(!xa_empty(&ctx->dev->qps));
qps              1049 drivers/infiniband/hw/cxgb4/device.c 	xa_init_flags(&devp->qps, XA_FLAGS_LOCK_IRQ);
qps              1272 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irqsave(&ctx->dev->qps, flags);
qps              1276 drivers/infiniband/hw/cxgb4/device.c 		xa_for_each(&ctx->dev->qps, index, qp)
qps              1281 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irqrestore(&ctx->dev->qps, flags);
qps              1311 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&ctx->dev->qps);
qps              1324 drivers/infiniband/hw/cxgb4/device.c 				xa_for_each(&ctx->dev->qps, index, qp)
qps              1337 drivers/infiniband/hw/cxgb4/device.c 				xa_unlock_irq(&ctx->dev->qps);
qps              1342 drivers/infiniband/hw/cxgb4/device.c 				xa_lock_irq(&ctx->dev->qps);
qps              1351 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&ctx->dev->qps);
qps              1356 drivers/infiniband/hw/cxgb4/device.c 	struct c4iw_qp **qps;
qps              1364 drivers/infiniband/hw/cxgb4/device.c 		c4iw_qp_rem_ref(&qp_list->qps[idx]->ibqp);
qps              1373 drivers/infiniband/hw/cxgb4/device.c 		struct c4iw_qp *qp = qp_list->qps[idx];
qps              1375 drivers/infiniband/hw/cxgb4/device.c 		xa_lock_irq(&qp->rhp->qps);
qps              1385 drivers/infiniband/hw/cxgb4/device.c 			xa_unlock_irq(&qp->rhp->qps);
qps              1399 drivers/infiniband/hw/cxgb4/device.c 			xa_unlock_irq(&qp->rhp->qps);
qps              1404 drivers/infiniband/hw/cxgb4/device.c 		xa_unlock_irq(&qp->rhp->qps);
qps              1435 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&ctx->dev->qps);
qps              1438 drivers/infiniband/hw/cxgb4/device.c 	xa_for_each(&ctx->dev->qps, index, qp)
qps              1441 drivers/infiniband/hw/cxgb4/device.c 	qp_list.qps = kcalloc(count, sizeof(*qp_list.qps), GFP_ATOMIC);
qps              1442 drivers/infiniband/hw/cxgb4/device.c 	if (!qp_list.qps) {
qps              1443 drivers/infiniband/hw/cxgb4/device.c 		xa_unlock_irq(&ctx->dev->qps);
qps              1449 drivers/infiniband/hw/cxgb4/device.c 	xa_for_each(&ctx->dev->qps, index, qp) {
qps              1451 drivers/infiniband/hw/cxgb4/device.c 		qp_list.qps[qp_list.idx++] = qp;
qps              1454 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&ctx->dev->qps);
qps              1461 drivers/infiniband/hw/cxgb4/device.c 	kfree(qp_list.qps);
qps              1463 drivers/infiniband/hw/cxgb4/device.c 	xa_lock_irq(&ctx->dev->qps);
qps              1466 drivers/infiniband/hw/cxgb4/device.c 	xa_unlock_irq(&ctx->dev->qps);
qps               126 drivers/infiniband/hw/cxgb4/ev.c 	xa_lock_irq(&dev->qps);
qps               127 drivers/infiniband/hw/cxgb4/ev.c 	qhp = xa_load(&dev->qps, CQE_QPID(err_cqe));
qps               134 drivers/infiniband/hw/cxgb4/ev.c 		xa_unlock_irq(&dev->qps);
qps               149 drivers/infiniband/hw/cxgb4/ev.c 		xa_unlock_irq(&dev->qps);
qps               155 drivers/infiniband/hw/cxgb4/ev.c 	xa_unlock_irq(&dev->qps);
qps               319 drivers/infiniband/hw/cxgb4/iw_cxgb4.h 	struct xarray qps;
qps               356 drivers/infiniband/hw/cxgb4/iw_cxgb4.h 	return xa_load(&rhp->qps, qpid);
qps                66 drivers/infiniband/hw/cxgb4/qp.c 	xa_lock_irq(&dev->qps);
qps                71 drivers/infiniband/hw/cxgb4/qp.c 	xa_unlock_irq(&dev->qps);
qps                82 drivers/infiniband/hw/cxgb4/qp.c 	xa_lock_irq(&dev->qps);
qps                84 drivers/infiniband/hw/cxgb4/qp.c 	xa_unlock_irq(&dev->qps);
qps               915 drivers/infiniband/hw/cxgb4/qp.c 	xa_lock_irqsave(&qhp->rhp->qps, flags);
qps               924 drivers/infiniband/hw/cxgb4/qp.c 	xa_unlock_irqrestore(&qhp->rhp->qps, flags);
qps               932 drivers/infiniband/hw/cxgb4/qp.c 	xa_lock_irqsave(&qhp->rhp->qps, flags);
qps               941 drivers/infiniband/hw/cxgb4/qp.c 	xa_unlock_irqrestore(&qhp->rhp->qps, flags);
qps              2088 drivers/infiniband/hw/cxgb4/qp.c 	xa_lock_irq(&rhp->qps);
qps              2089 drivers/infiniband/hw/cxgb4/qp.c 	__xa_erase(&rhp->qps, qhp->wq.sq.qid);
qps              2092 drivers/infiniband/hw/cxgb4/qp.c 	xa_unlock_irq(&rhp->qps);
qps              2220 drivers/infiniband/hw/cxgb4/qp.c 	ret = xa_insert_irq(&rhp->qps, qhp->wq.sq.qid, qhp, GFP_KERNEL);
qps              2356 drivers/infiniband/hw/cxgb4/qp.c 	xa_erase_irq(&rhp->qps, qhp->wq.sq.qid);
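Within the cxgb4 group, device.c:1435-1461 shows the most involved use of the table: the recovery path counts the QPs under xa_lock_irq(), snapshots them into a kcalloc'd pointer array (GFP_ATOMIC, since the lock is held), takes a reference on each, and only then drops the lock to do the heavy per-QP work. A sketch of that snapshot-then-process shape, reusing the demo_dev/demo_qp types from the sketch above; grab_ref(), drop_ref() and recover_one() are placeholders for c4iw_qp_add_ref(), c4iw_qp_rem_ref() and the per-QP recovery work.

#include <linux/slab.h>

static void grab_ref(struct demo_qp *qp);	/* placeholder for c4iw_qp_add_ref() */
static void drop_ref(struct demo_qp *qp);	/* placeholder for c4iw_qp_rem_ref() */
static void recover_one(struct demo_qp *qp);	/* placeholder for the recovery work */

static void demo_recover_all(struct demo_dev *dev)
{
	struct demo_qp *qp, **snap;
	unsigned long index;
	int count = 0, i = 0;

	xa_lock_irq(&dev->qps);
	xa_for_each(&dev->qps, index, qp)	/* device.c:1438: count first */
		count++;

	snap = kcalloc(count, sizeof(*snap), GFP_ATOMIC);	/* lock held: no sleeping */
	if (!snap) {
		xa_unlock_irq(&dev->qps);
		return;
	}

	xa_for_each(&dev->qps, index, qp) {
		grab_ref(qp);			/* keep the QP alive once the lock drops */
		snap[i++] = qp;
	}
	xa_unlock_irq(&dev->qps);

	for (i = 0; i < count; i++) {		/* heavy work, lock no longer held */
		recover_one(snap[i]);
		drop_ref(snap[i]);
	}
	kfree(snap);
}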
qps              1613 drivers/infiniband/hw/hfi1/pio.c 	struct rvt_qp *qps[PIO_WAIT_BATCH_SIZE];
qps              1633 drivers/infiniband/hw/hfi1/pio.c 		if (n == ARRAY_SIZE(qps))
qps              1642 drivers/infiniband/hw/hfi1/pio.c 			priv = qps[top_idx]->priv;
qps              1649 drivers/infiniband/hw/hfi1/pio.c 		qps[n++] = qp;
qps              1664 drivers/infiniband/hw/hfi1/pio.c 		hfi1_qp_wakeup(qps[top_idx],
qps              1668 drivers/infiniband/hw/hfi1/pio.c 			hfi1_qp_wakeup(qps[i],
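hfi1's pio.c keeps a small on-stack batch (PIO_WAIT_BATCH_SIZE entries) of rvt_qp pointers: waiters are popped off the send-context wait list until the batch is full, and the wakeups happen after the list lock is dropped, with one "top" entry handled first. qib_verbs.c:672 and :1148 below drain their lists with the same on-stack batch. A minimal sketch of the batch-drain shape; the wait list, its lock and the wakeup helper are local stand-ins, not the hfi1 names.

#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/spinlock.h>

struct demo_waiter {
	struct list_head wait_node;
};

struct demo_waitq {
	spinlock_t	 lock;
	struct list_head waiters;		/* QPs blocked waiting for credits */
};

static void demo_qp_wakeup(struct demo_waiter *qp);	/* cf. hfi1_qp_wakeup() */

static void demo_drain_waiters(struct demo_waitq *wq)
{
	struct demo_waiter *batch[8];		/* stands in for PIO_WAIT_BATCH_SIZE */
	struct demo_waiter *qp;
	int n = 0, i;

	spin_lock(&wq->lock);
	while (n < ARRAY_SIZE(batch)) {		/* pio.c:1633: stop when the batch is full */
		qp = list_first_entry_or_null(&wq->waiters,
					      struct demo_waiter, wait_node);
		if (!qp)
			break;
		list_del_init(&qp->wait_node);
		batch[n++] = qp;		/* pio.c:1649: qps[n++] = qp */
	}
	spin_unlock(&wq->lock);

	for (i = 0; i < n; i++)			/* wake everyone outside the lock */
		demo_qp_wakeup(batch[i]);
}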
qps              1695 drivers/infiniband/hw/mlx5/main.c 		dev->lb.qps++;
qps              1698 drivers/infiniband/hw/mlx5/main.c 	    dev->lb.qps == 1) {
qps              1716 drivers/infiniband/hw/mlx5/main.c 		dev->lb.qps--;
qps              1719 drivers/infiniband/hw/mlx5/main.c 	    dev->lb.qps == 0) {
qps               932 drivers/infiniband/hw/mlx5/mlx5_ib.h 	int			qps;
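In mlx5, qps is not a table but a counter inside dev->lb: raw-Ethernet QPs increment it, and the port's loopback-check state is only reprogrammed on the 0 -> 1 and 1 -> 0 transitions (the real code at main.c:1695-1719 also folds other loopback users into the decision). A sketch of that count-driven enable/disable; the mutex placement and demo_enable_lb()/demo_disable_lb() are assumptions.

#include <linux/mutex.h>

struct demo_lb_state {
	struct mutex	mutex;
	int		qps;		/* raw-packet QPs currently active */
	bool		enabled;
};

static int demo_enable_lb(void);	/* hypothetical hardware calls */
static void demo_disable_lb(void);

static int demo_lb_add_qp(struct demo_lb_state *lb)
{
	int err = 0;

	mutex_lock(&lb->mutex);
	lb->qps++;
	if (lb->qps == 1 && !lb->enabled) {	/* first user: turn the feature on */
		err = demo_enable_lb();
		if (err)
			lb->qps--;
		else
			lb->enabled = true;
	}
	mutex_unlock(&lb->mutex);
	return err;
}

static void demo_lb_del_qp(struct demo_lb_state *lb)
{
	mutex_lock(&lb->mutex);
	lb->qps--;
	if (lb->qps == 0 && lb->enabled) {	/* last user: turn it back off */
		demo_disable_lb();
		lb->enabled = false;
	}
	mutex_unlock(&lb->mutex);
}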
qps               151 drivers/infiniband/hw/ocrdma/ocrdma_hw.c enum ib_qp_state get_ibqp_state(enum ocrdma_qp_state qps)
qps               153 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	switch (qps) {
qps               173 drivers/infiniband/hw/ocrdma/ocrdma_hw.c static enum ocrdma_qp_state get_ocrdma_qp_state(enum ib_qp_state qps)
qps               175 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	switch (qps) {
qps               104 drivers/infiniband/hw/ocrdma/ocrdma_hw.h enum ib_qp_state get_ibqp_state(enum ocrdma_qp_state qps);
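In ocrdma, qps is simply the parameter name of two switch-based translators between the hardware QP-state encoding and enum ib_qp_state. A sketch of one direction using a hypothetical hardware enum; the IB_QPS_* values are the real ib_verbs.h ones, while the hardware-side names and the exact mapping are assumptions.

#include <rdma/ib_verbs.h>

enum demo_hw_qp_state {			/* hypothetical HW encoding */
	DEMO_QPS_RST,
	DEMO_QPS_INIT,
	DEMO_QPS_RTS,
	DEMO_QPS_ERR,
};

static enum ib_qp_state demo_get_ibqp_state(enum demo_hw_qp_state qps)
{
	switch (qps) {
	case DEMO_QPS_INIT:
		return IB_QPS_INIT;
	case DEMO_QPS_RTS:
		return IB_QPS_RTS;
	case DEMO_QPS_ERR:
		return IB_QPS_ERR;
	case DEMO_QPS_RST:
	default:
		return IB_QPS_RESET;
	}
}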
qps               363 drivers/infiniband/hw/qedr/main.c 		xa_init(&dev->qps);
qps               172 drivers/infiniband/hw/qedr/qedr.h 	struct xarray		qps;
qps               513 drivers/infiniband/hw/qedr/qedr_iw_cm.c 	xa_lock(&dev->qps);
qps               514 drivers/infiniband/hw/qedr/qedr_iw_cm.c 	qp = xa_load(&dev->qps, qpn);
qps               517 drivers/infiniband/hw/qedr/qedr_iw_cm.c 	xa_unlock(&dev->qps);
qps               803 drivers/infiniband/hw/qedr/qedr_iw_cm.c 	return xa_load(&dev->qps, qpn);
qps              1935 drivers/infiniband/hw/qedr/verbs.c 		rc = xa_insert(&dev->qps, qp->qp_id, qp, GFP_KERNEL);
qps              2515 drivers/infiniband/hw/qedr/verbs.c 		xa_erase(&dev->qps, qp->qp_id);
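qedr uses the same qp_id -> QP xarray (xa_insert at create, xa_erase at destroy), but the iWARP CM path at qedr_iw_cm.c:513-517 takes the plain xa_lock() rather than the irq variant. A minimal sketch of a lookup that pins the QP before releasing the table lock; the kref member is an assumption standing in for however the driver keeps the QP alive afterwards.

#include <linux/kref.h>
#include <linux/xarray.h>

struct demo_iw_qp {
	u32		qp_id;
	struct kref	refcnt;		/* assumed; keeps the QP alive after unlock */
};

struct demo_iw_dev {
	struct xarray	qps;		/* qp_id -> QP, plain xa_lock */
};

static struct demo_iw_qp *demo_iw_get_qp(struct demo_iw_dev *dev, u32 qpn)
{
	struct demo_iw_qp *qp;

	xa_lock(&dev->qps);			/* qedr_iw_cm.c:513 */
	qp = xa_load(&dev->qps, qpn);
	if (qp)
		kref_get(&qp->refcnt);		/* hold it across the unlock */
	xa_unlock(&dev->qps);

	return qp;
}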
qps               672 drivers/infiniband/hw/qib/qib_verbs.c 	struct rvt_qp *qps[20];
qps               685 drivers/infiniband/hw/qib/qib_verbs.c 		if (n == ARRAY_SIZE(qps))
qps               692 drivers/infiniband/hw/qib/qib_verbs.c 		qps[n++] = qp;
qps               698 drivers/infiniband/hw/qib/qib_verbs.c 		qp = qps[i];
qps              1148 drivers/infiniband/hw/qib/qib_verbs.c 	struct rvt_qp *qps[5];
qps              1165 drivers/infiniband/hw/qib/qib_verbs.c 		if (n == ARRAY_SIZE(qps))
qps              1171 drivers/infiniband/hw/qib/qib_verbs.c 		qps[n++] = qp;
qps              1178 drivers/infiniband/hw/qib/qib_verbs.c 		qp = qps[i];
qps               858 drivers/net/ethernet/huawei/hinic/hinic_hw_dev.c 	struct hinic_qp *qp = &func_to_io->qps[i];
qps               876 drivers/net/ethernet/huawei/hinic/hinic_hw_dev.c 	struct hinic_qp *qp = &func_to_io->qps[i];
qps               126 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 		qp = &func_to_io->qps[i];
qps               170 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 		qp = &func_to_io->qps[i];
qps               377 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 	qps_size = num_qps * sizeof(*func_to_io->qps);
qps               378 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 	func_to_io->qps = devm_kzalloc(&pdev->dev, qps_size, GFP_KERNEL);
qps               379 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 	if (!func_to_io->qps)
qps               415 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 		err = init_qp(func_to_io, &func_to_io->qps[i], i,
qps               440 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 		destroy_qp(func_to_io, &func_to_io->qps[j]);
qps               455 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 	devm_kfree(&pdev->dev, func_to_io->qps);
qps               474 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 		destroy_qp(func_to_io, &func_to_io->qps[i]);
qps               484 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 	devm_kfree(&pdev->dev, func_to_io->qps);
qps               507 drivers/net/ethernet/huawei/hinic/hinic_hw_io.c 	func_to_io->qps = NULL;
qps                58 drivers/net/ethernet/huawei/hinic/hinic_hw_io.h 	struct hinic_qp         *qps;
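hinic keeps its queue pairs in one flat, devm-managed array per IO function: the array is sized as num_qps * sizeof(*qps), each entry is initialised in a loop, and a mid-loop failure unwinds only the entries that were already set up (hinic_hw_io.c:377-440). A sketch of that allocate/init/unwind shape; demo_init_qp() and demo_destroy_qp() are placeholders for the driver's init_qp()/destroy_qp().

#include <linux/device.h>
#include <linux/errno.h>

struct demo_qp_ctx {
	int id;
};

struct demo_io {
	struct device		*dev;
	struct demo_qp_ctx	*qps;
	u16			max_qps;
};

static int demo_init_qp(struct demo_io *io, struct demo_qp_ctx *qp, int i);
static void demo_destroy_qp(struct demo_io *io, struct demo_qp_ctx *qp);

static int demo_io_create_qps(struct demo_io *io, u16 num_qps)
{
	size_t qps_size = num_qps * sizeof(*io->qps);	/* hinic_hw_io.c:377 */
	int i, err;

	io->qps = devm_kzalloc(io->dev, qps_size, GFP_KERNEL);
	if (!io->qps)
		return -ENOMEM;

	for (i = 0; i < num_qps; i++) {
		err = demo_init_qp(io, &io->qps[i], i);
		if (err)
			goto err_unwind;
	}
	io->max_qps = num_qps;
	return 0;

err_unwind:
	while (--i >= 0)			/* tear down only what succeeded */
		demo_destroy_qp(io, &io->qps[i]);
	devm_kfree(io->dev, io->qps);
	io->qps = NULL;
	return err;
}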
qps               804 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	u16 vsi_id, qps;
qps               812 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 			qps = vf->ch[i].num_qps;
qps               815 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 			qps = pf->vsi[vf->lan_vsi_idx]->alloc_queue_pairs;
qps               820 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 			if (j * 2 >= qps) {
qps               851 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 	u32 qps, num_tc = 1; /* VF has at least one traffic class */
qps               860 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 			qps = vf->ch[i].num_qps;
qps               863 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 			qps = pf->vsi[vf->lan_vsi_idx]->alloc_queue_pairs;
qps               867 drivers/net/ethernet/intel/i40e/i40e_virtchnl_pf.c 		for (j = 0; j < qps; j++) {
qps              1616 drivers/net/ethernet/mellanox/mlx4/en_ethtool.c 		qpn = priv->rss_map.qps[cmd->fs.ring_cookie].qpn;
qps               139 drivers/net/ethernet/mellanox/mlx4/en_main.c 						      &priv->rss_map.qps[i],
qps               249 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 	rule.qpn = priv->rss_map.qps[filter->rxq_index].qpn;
qps              1166 drivers/net/ethernet/mellanox/mlx4/en_rx.c 					    &rss_map->qps[i]);
qps              1174 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		rss_map->indir_qp = &rss_map->qps[0];
qps              1250 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			       MLX4_QP_STATE_RST, NULL, 0, 0, &rss_map->qps[i]);
qps              1251 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		mlx4_qp_remove(mdev->dev, &rss_map->qps[i]);
qps              1252 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		mlx4_qp_free(mdev->dev, &rss_map->qps[i]);
qps              1276 drivers/net/ethernet/mellanox/mlx4/en_rx.c 			       MLX4_QP_STATE_RST, NULL, 0, 0, &rss_map->qps[i]);
qps              1277 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		mlx4_qp_remove(mdev->dev, &rss_map->qps[i]);
qps              1278 drivers/net/ethernet/mellanox/mlx4/en_rx.c 		mlx4_qp_free(mdev->dev, &rss_map->qps[i]);
qps               444 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h 	struct mlx4_qp qps[MAX_RX_RINGS];
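mlx4_en keeps one RSS QP per RX ring in a fixed MAX_RX_RINGS array (mlx4_en.h:444); flow steering resolves a ring index straight to qps[ring].qpn (en_ethtool.c:1616), and teardown walks the same array resetting, unhashing and freeing each QP. A sketch of that teardown loop using the mlx4 QP calls visible at en_rx.c:1250-1252 and 1276-1278; the per-ring state[] array and the leading mlx4_qp_modify() arguments are assumptions filled in around the visible tail of the call.

#include <linux/mlx4/device.h>
#include <linux/mlx4/qp.h>

#define DEMO_MAX_RX_RINGS 128			/* stands in for MAX_RX_RINGS */

struct demo_rss_map {
	struct mlx4_qp		qps[DEMO_MAX_RX_RINGS];		/* one QP per RX ring */
	enum mlx4_qp_state	state[DEMO_MAX_RX_RINGS];	/* assumed tracking */
};

static void demo_release_rss_qps(struct mlx4_dev *dev,
				 struct demo_rss_map *rss_map, int num_rings)
{
	int i;

	for (i = 0; i < num_rings; i++) {
		/* bring the QP back to RST before releasing it */
		mlx4_qp_modify(dev, NULL, rss_map->state[i],
			       MLX4_QP_STATE_RST, NULL, 0, 0, &rss_map->qps[i]);
		mlx4_qp_remove(dev, &rss_map->qps[i]);
		mlx4_qp_free(dev, &rss_map->qps[i]);
	}
}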