cq_table           90 drivers/infiniband/hw/hns/hns_roce_cq.c 	struct hns_roce_cq_table *cq_table;
cq_table           96 drivers/infiniband/hw/hns/hns_roce_cq.c 	cq_table = &hr_dev->cq_table;
cq_table          117 drivers/infiniband/hw/hns/hns_roce_cq.c 	ret = hns_roce_bitmap_alloc(&cq_table->bitmap, &hr_cq->cqn);
cq_table          124 drivers/infiniband/hw/hns/hns_roce_cq.c 	ret = hns_roce_table_get(hr_dev, &cq_table->table, hr_cq->cqn);
cq_table          130 drivers/infiniband/hw/hns/hns_roce_cq.c 	ret = xa_err(xa_store(&cq_table->array, hr_cq->cqn, hr_cq, GFP_KERNEL));
cq_table          163 drivers/infiniband/hw/hns/hns_roce_cq.c 	xa_erase(&cq_table->array, hr_cq->cqn);
cq_table          166 drivers/infiniband/hw/hns/hns_roce_cq.c 	hns_roce_table_put(hr_dev, &cq_table->table, hr_cq->cqn);
cq_table          169 drivers/infiniband/hw/hns/hns_roce_cq.c 	hns_roce_bitmap_free(&cq_table->bitmap, hr_cq->cqn, BITMAP_NO_RR);
cq_table          184 drivers/infiniband/hw/hns/hns_roce_cq.c 	struct hns_roce_cq_table *cq_table = &hr_dev->cq_table;
cq_table          193 drivers/infiniband/hw/hns/hns_roce_cq.c 	xa_erase(&cq_table->array, hr_cq->cqn);
cq_table          203 drivers/infiniband/hw/hns/hns_roce_cq.c 	hns_roce_table_put(hr_dev, &cq_table->table, hr_cq->cqn);
cq_table          204 drivers/infiniband/hw/hns/hns_roce_cq.c 	hns_roce_bitmap_free(&cq_table->bitmap, hr_cq->cqn, BITMAP_NO_RR);
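
Read together, the hns_roce_cq.c hits above (lines 90-169 for the allocation path, 184-204 for the free path) show the usual CQ lifetime: a CQN comes out of cq_table->bitmap, the per-CQN context entry is pinned with hns_roce_table_get(), the CQ object is published in cq_table->array with xa_store(), and teardown undoes those steps in reverse order. Below is a minimal user-space model of that ordering only; the struct layout and helper names in it are invented for illustration and are not driver code.

/* Minimal user-space model of the allocate -> pin -> publish ordering seen in
 * hns_roce_cq.c above.  All types and helpers here are illustrative; only the
 * ordering (bitmap CQN, context pin, lookup-array publish, reverse teardown)
 * mirrors the listing. */
#include <errno.h>
#include <stdbool.h>
#include <stdio.h>

#define NUM_CQS 64

struct model_cq { unsigned long cqn; };

struct model_cq_table {
        bool cqn_used[NUM_CQS];          /* stands in for cq_table->bitmap    */
        int  ctx_refs[NUM_CQS];          /* stands in for the CQ context refs */
        struct model_cq *array[NUM_CQS]; /* stands in for cq_table->array     */
};

static int model_cq_alloc(struct model_cq_table *t, struct model_cq *cq)
{
        unsigned long cqn;

        /* 1. reserve a CQ number, like hns_roce_bitmap_alloc() */
        for (cqn = 0; cqn < NUM_CQS; cqn++)
                if (!t->cqn_used[cqn])
                        break;
        if (cqn == NUM_CQS)
                return -ENOMEM;
        t->cqn_used[cqn] = true;

        /* 2. pin the per-CQN context entry, like hns_roce_table_get() */
        t->ctx_refs[cqn]++;

        /* 3. publish the CQ so event dispatch can find it, like xa_store() */
        t->array[cqn] = cq;
        cq->cqn = cqn;
        return 0;
}

static void model_cq_free(struct model_cq_table *t, struct model_cq *cq)
{
        /* teardown reverses the allocation order, as in the free path above */
        t->array[cq->cqn] = NULL;      /* xa_erase()              */
        t->ctx_refs[cq->cqn]--;        /* hns_roce_table_put()    */
        t->cqn_used[cq->cqn] = false;  /* hns_roce_bitmap_free()  */
}

int main(void)
{
        struct model_cq_table table = { 0 };
        struct model_cq cq;

        if (model_cq_alloc(&table, &cq))
                return 1;
        printf("allocated cqn %lu\n", cq.cqn);
        model_cq_free(&table, &cq);
        return 0;
}
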
cq_table          526 drivers/infiniband/hw/hns/hns_roce_cq.c 	cq = xa_load(&hr_dev->cq_table.array, cqn & (hr_dev->caps.num_cqs - 1));
cq_table          538 drivers/infiniband/hw/hns/hns_roce_cq.c 	struct hns_roce_cq_table *cq_table = &hr_dev->cq_table;
cq_table          542 drivers/infiniband/hw/hns/hns_roce_cq.c 	cq = xa_load(&cq_table->array, cqn & (hr_dev->caps.num_cqs - 1));
cq_table          559 drivers/infiniband/hw/hns/hns_roce_cq.c 	struct hns_roce_cq_table *cq_table = &hr_dev->cq_table;
cq_table          561 drivers/infiniband/hw/hns/hns_roce_cq.c 	xa_init(&cq_table->array);
cq_table          563 drivers/infiniband/hw/hns/hns_roce_cq.c 	return hns_roce_bitmap_init(&cq_table->bitmap, hr_dev->caps.num_cqs,
cq_table          570 drivers/infiniband/hw/hns/hns_roce_cq.c 	hns_roce_bitmap_cleanup(&hr_dev->cq_table.bitmap);
cq_table         1029 drivers/infiniband/hw/hns/hns_roce_device.h 	struct hns_roce_cq_table  cq_table;
cq_table         1096 drivers/infiniband/hw/hns/hns_roce_hem.c 	hns_roce_cleanup_hem_table(hr_dev, &hr_dev->cq_table.table);
cq_table          646 drivers/infiniband/hw/hns/hns_roce_main.c 	ret = hns_roce_init_hem_table(hr_dev, &hr_dev->cq_table.table,
cq_table          758 drivers/infiniband/hw/hns/hns_roce_main.c 	hns_roce_cleanup_hem_table(hr_dev, &hr_dev->cq_table.table);
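
The remaining hns_roce hits show where the table lives and how it is found again: cq_table is a member of struct hns_roce_dev (hns_roce_device.h:1029), the completion and async-event paths resolve a reported CQN with xa_load() masked by (num_cqs - 1) (hns_roce_cq.c:526 and 542), and the table init/cleanup plus the backing HEM table setup sit in hns_roce_cq.c:559-570, hns_roce_main.c and hns_roce_hem.c. The mask only works when num_cqs is a power of two; a tiny standalone demonstration with made-up numbers, not values from the driver:

/* Why "cqn & (num_cqs - 1)" recovers the table index: when num_cqs is a power
 * of two, the mask strips any high bits reported alongside the CQN and is
 * equivalent to cqn % num_cqs.  The values below are invented. */
#include <assert.h>
#include <stdio.h>

int main(void)
{
        unsigned int num_cqs  = 0x10000;     /* assumed power-of-two capability */
        unsigned int reported = 0x80001234;  /* CQN field plus stray high bits  */

        unsigned int index = reported & (num_cqs - 1);

        assert(index == reported % num_cqs);
        printf("lookup index: 0x%x\n", index);
        return 0;
}
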
cq_table          221 drivers/infiniband/hw/mthca/mthca_cq.c 	cq = mthca_array_get(&dev->cq_table.cq, cqn & (dev->limits.num_cqs - 1));
cq_table          239 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_lock(&dev->cq_table.lock);
cq_table          241 drivers/infiniband/hw/mthca/mthca_cq.c 	cq = mthca_array_get(&dev->cq_table.cq, cqn & (dev->limits.num_cqs - 1));
cq_table          245 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_unlock(&dev->cq_table.lock);
cq_table          258 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_lock(&dev->cq_table.lock);
cq_table          261 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_unlock(&dev->cq_table.lock);
cq_table          779 drivers/infiniband/hw/mthca/mthca_cq.c 	cq->cqn = mthca_alloc(&dev->cq_table.alloc);
cq_table          784 drivers/infiniband/hw/mthca/mthca_cq.c 		err = mthca_table_get(dev, dev->cq_table.table, cq->cqn);
cq_table          848 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_lock_irq(&dev->cq_table.lock);
cq_table          849 drivers/infiniband/hw/mthca/mthca_cq.c 	if (mthca_array_set(&dev->cq_table.cq,
cq_table          852 drivers/infiniband/hw/mthca/mthca_cq.c 		spin_unlock_irq(&dev->cq_table.lock);
cq_table          855 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_unlock_irq(&dev->cq_table.lock);
cq_table          879 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_table_put(dev, dev->cq_table.table, cq->cqn);
cq_table          882 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_free(&dev->cq_table.alloc, cq->cqn);
cq_table          891 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_lock_irq(&dev->cq_table.lock);
cq_table          893 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_unlock_irq(&dev->cq_table.lock);
cq_table          925 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_lock_irq(&dev->cq_table.lock);
cq_table          926 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_array_clear(&dev->cq_table.cq,
cq_table          929 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_unlock_irq(&dev->cq_table.lock);
cq_table          946 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_table_put(dev, dev->cq_table.table, cq->cqn);
cq_table          947 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_free(&dev->cq_table.alloc, cq->cqn);
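
The mthca hits follow the same shape with older infrastructure: mthca_alloc() hands out the CQN (mthca_cq.c:779), mthca_table_get() pins the ICM entry (784), and the CQ is published into the mthca_array under spin_lock_irq(&dev->cq_table.lock) (848-855); freeing clears the array entry and releases the ICM entry and CQN (925-947). The async-event path at 239-245 does its lookup under cq_table.lock, while line 221 shows the completion path doing a bare masked lookup. A user-space sketch of just the locked publish/lookup/clear discipline, with all names invented:

/* User-space model of the mthca pattern above: a single lock (cq_table.lock)
 * guards the cqn -> cq pointer array; publish and clear happen under it, and
 * lookups use "cqn & (num_cqs - 1)".  Everything here is illustrative, not
 * mthca code. */
#include <errno.h>
#include <pthread.h>
#include <stdio.h>

#define NUM_CQS 128   /* assumed power of two, as the masking requires */

struct model_cq { int cqn; };

static pthread_mutex_t table_lock = PTHREAD_MUTEX_INITIALIZER;
static struct model_cq *cq_array[NUM_CQS];   /* stands in for cq_table.cq */

/* publish: mirrors mthca_array_set() under the table lock */
static int publish_cq(struct model_cq *cq)
{
        int idx = cq->cqn & (NUM_CQS - 1);

        pthread_mutex_lock(&table_lock);
        if (cq_array[idx]) {
                pthread_mutex_unlock(&table_lock);
                return -EBUSY;
        }
        cq_array[idx] = cq;
        pthread_mutex_unlock(&table_lock);
        return 0;
}

/* lookup: mirrors the mthca_array_get() calls in the event handlers */
static struct model_cq *lookup_cq(int cqn)
{
        struct model_cq *cq;

        pthread_mutex_lock(&table_lock);
        cq = cq_array[cqn & (NUM_CQS - 1)];
        pthread_mutex_unlock(&table_lock);
        return cq;
}

/* unpublish: mirrors mthca_array_clear() in the free path */
static void unpublish_cq(struct model_cq *cq)
{
        pthread_mutex_lock(&table_lock);
        cq_array[cq->cqn & (NUM_CQS - 1)] = NULL;
        pthread_mutex_unlock(&table_lock);
}

int main(void)
{
        struct model_cq cq = { .cqn = 5 };

        publish_cq(&cq);
        printf("lookup(5) -> %p\n", (void *)lookup_cq(5));
        unpublish_cq(&cq);
        return 0;
}
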
cq_table          955 drivers/infiniband/hw/mthca/mthca_cq.c 	spin_lock_init(&dev->cq_table.lock);
cq_table          957 drivers/infiniband/hw/mthca/mthca_cq.c 	err = mthca_alloc_init(&dev->cq_table.alloc,
cq_table          964 drivers/infiniband/hw/mthca/mthca_cq.c 	err = mthca_array_init(&dev->cq_table.cq,
cq_table          967 drivers/infiniband/hw/mthca/mthca_cq.c 		mthca_alloc_cleanup(&dev->cq_table.alloc);
cq_table          974 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_array_cleanup(&dev->cq_table.cq, dev->limits.num_cqs);
cq_table          975 drivers/infiniband/hw/mthca/mthca_cq.c 	mthca_alloc_cleanup(&dev->cq_table.alloc);
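
mthca_init_cq_table() (mthca_cq.c:955-967) and its cleanup counterpart (974-975) show the standard pairing: the lock, the CQN allocator and the lookup array are set up in order, a failure in the second step unwinds the first, and cleanup releases them in reverse. A trivial model of that convention, using stand-in helpers rather than the mthca ones:

/* Model of the init/cleanup pairing above: unwind the first step if the
 * second fails, and tear down in reverse order of setup. */
#include <stdio.h>

static int  alloc_init(void)    { puts("cqn allocator ready");   return 0; }
static void alloc_cleanup(void) { puts("cqn allocator freed");             }
static int  array_init(void)    { puts("cq lookup array ready"); return 0; }
static void array_cleanup(void) { puts("cq lookup array freed");           }

static int init_cq_table(void)
{
        int err;

        err = alloc_init();          /* mthca_alloc_init()          */
        if (err)
                return err;

        err = array_init();          /* mthca_array_init()          */
        if (err)
                alloc_cleanup();     /* unwind step one on failure  */

        return err;
}

static void cleanup_cq_table(void)
{
        array_cleanup();             /* reverse order of init       */
        alloc_cleanup();
}

int main(void)
{
        if (!init_cq_table())
                cleanup_cq_table();
        return 0;
}
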
cq_table          343 drivers/infiniband/hw/mthca/mthca_dev.h 	struct mthca_cq_table  cq_table;
cq_table          476 drivers/infiniband/hw/mthca/mthca_main.c 	mdev->cq_table.table = mthca_alloc_icm_table(mdev, init_hca->cqc_base,
cq_table          481 drivers/infiniband/hw/mthca/mthca_main.c 	if (!mdev->cq_table.table) {
cq_table          527 drivers/infiniband/hw/mthca/mthca_main.c 	mthca_free_icm_table(mdev, mdev->cq_table.table);
cq_table          562 drivers/infiniband/hw/mthca/mthca_main.c 	mthca_free_icm_table(mdev, mdev->cq_table.table);
cq_table          109 drivers/net/ethernet/mellanox/mlx4/cq.c 	cq = radix_tree_lookup(&mlx4_priv(dev)->cq_table.tree,
cq_table          128 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_cq_table *cq_table = &mlx4_priv(dev)->cq_table;
cq_table          132 drivers/net/ethernet/mellanox/mlx4/cq.c 	cq = radix_tree_lookup(&cq_table->tree, cqn & (dev->caps.num_cqs - 1));
cq_table          220 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_cq_table *cq_table = &priv->cq_table;
cq_table          223 drivers/net/ethernet/mellanox/mlx4/cq.c 	*cqn = mlx4_bitmap_alloc(&cq_table->bitmap);
cq_table          227 drivers/net/ethernet/mellanox/mlx4/cq.c 	err = mlx4_table_get(dev, &cq_table->table, *cqn);
cq_table          231 drivers/net/ethernet/mellanox/mlx4/cq.c 	err = mlx4_table_get(dev, &cq_table->cmpt_table, *cqn);
cq_table          237 drivers/net/ethernet/mellanox/mlx4/cq.c 	mlx4_table_put(dev, &cq_table->table, *cqn);
cq_table          240 drivers/net/ethernet/mellanox/mlx4/cq.c 	mlx4_bitmap_free(&cq_table->bitmap, *cqn, MLX4_NO_RR);
cq_table          267 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_cq_table *cq_table = &priv->cq_table;
cq_table          269 drivers/net/ethernet/mellanox/mlx4/cq.c 	mlx4_table_put(dev, &cq_table->cmpt_table, cqn);
cq_table          270 drivers/net/ethernet/mellanox/mlx4/cq.c 	mlx4_table_put(dev, &cq_table->table, cqn);
cq_table          271 drivers/net/ethernet/mellanox/mlx4/cq.c 	mlx4_bitmap_free(&cq_table->bitmap, cqn, MLX4_NO_RR);
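
In mlx4, each CQN is backed by two ICM entries: the allocation path (cq.c:220-240) takes the CQN from cq_table->bitmap, then pins both cq_table->table and cq_table->cmpt_table, unwinding in reverse order if the second get fails, and the free path (cq.c:267-271) releases all three. A compact model of that goto-style unwind, using stand-in helpers rather than the mlx4 ones:

/* Model of the alloc/unwind/free shape the cq.c hits above suggest: reserve a
 * CQN, take two per-CQN table references, and on failure roll back with gotos
 * in reverse order.  All helpers below are invented stand-ins. */
#include <errno.h>
#include <stdio.h>

static int  bitmap_alloc(int *cqn)   { *cqn = 7; return 0; }
static void bitmap_free(int cqn)     { (void)cqn; }
static int  cq_table_get(int cqn)    { (void)cqn; return 0; }
static void cq_table_put(int cqn)    { (void)cqn; }
static int  cmpt_table_get(int cqn)  { (void)cqn; return 0; }
static void cmpt_table_put(int cqn)  { (void)cqn; }

static int model_cq_alloc_icm(int *cqn)
{
        int err;

        err = bitmap_alloc(cqn);         /* mlx4_bitmap_alloc()               */
        if (err)
                return err;

        err = cq_table_get(*cqn);        /* mlx4_table_get(&...->table)       */
        if (err)
                goto err_out;

        err = cmpt_table_get(*cqn);      /* mlx4_table_get(&...->cmpt_table)  */
        if (err)
                goto err_put;

        return 0;

err_put:
        cq_table_put(*cqn);
err_out:
        bitmap_free(*cqn);
        return err;
}

static void model_cq_free_icm(int cqn)
{
        /* the free path releases in reverse order of allocation */
        cmpt_table_put(cqn);
        cq_table_put(cqn);
        bitmap_free(cqn);
}

int main(void)
{
        int cqn;

        if (!model_cq_alloc_icm(&cqn)) {
                printf("got cqn %d\n", cqn);
                model_cq_free_icm(cqn);
        }
        return 0;
}
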
cq_table          348 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_cq_table *cq_table = &priv->cq_table;
cq_table          363 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_lock(&cq_table->lock);
cq_table          364 drivers/net/ethernet/mellanox/mlx4/cq.c 	err = radix_tree_insert(&cq_table->tree, cq->cqn, cq);
cq_table          365 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_unlock(&cq_table->lock);
cq_table          424 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_lock(&cq_table->lock);
cq_table          425 drivers/net/ethernet/mellanox/mlx4/cq.c 	radix_tree_delete(&cq_table->tree, cq->cqn);
cq_table          426 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_unlock(&cq_table->lock);
cq_table          438 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_cq_table *cq_table = &priv->cq_table;
cq_table          445 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_lock(&cq_table->lock);
cq_table          446 drivers/net/ethernet/mellanox/mlx4/cq.c 	radix_tree_delete(&cq_table->tree, cq->cqn);
cq_table          447 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_unlock(&cq_table->lock);
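
mlx4_cq_alloc() inserts the CQ into the per-device radix tree under cq_table->lock (cq.c:363-365) before the rest of the setup runs; the radix_tree_delete under the same lock at cq.c:424-426 appears to be the unwind of that insert when a later step fails, and the one at 445-447 is the normal removal on free. The dispatch paths at cq.c:109 and 132 look the CQ up by cqn & (num_cqs - 1); the listing does not show what protects those lookups, so the sketch below (all names invented) models only the insert/undo/remove side:

/* User-space model of "publish under the lock, then finish setup; on failure,
 * unpublish under the same lock".  The array stands in for the radix tree. */
#include <errno.h>
#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

struct model_cq { unsigned int cqn; };

static pthread_mutex_t tree_lock = PTHREAD_MUTEX_INITIALIZER;
static struct model_cq *tree[256];          /* stands in for cq_table->tree */

static int tree_insert(struct model_cq *cq)
{
        int err = 0;

        pthread_mutex_lock(&tree_lock);     /* spin_lock(&cq_table->lock)   */
        if (tree[cq->cqn])
                err = -EEXIST;
        else
                tree[cq->cqn] = cq;
        pthread_mutex_unlock(&tree_lock);
        return err;
}

static void tree_delete(struct model_cq *cq)
{
        pthread_mutex_lock(&tree_lock);
        tree[cq->cqn] = NULL;
        pthread_mutex_unlock(&tree_lock);
}

/* stand-in for the later setup step that makes the CQ live */
static int hw_init_cq(struct model_cq *cq, bool fail)
{
        (void)cq;
        return fail ? -EIO : 0;
}

static int model_cq_alloc(struct model_cq *cq, bool simulate_failure)
{
        int err = tree_insert(cq);               /* publish first            */
        if (err)
                return err;

        err = hw_init_cq(cq, simulate_failure);  /* then finish setup        */
        if (err)
                tree_delete(cq);                 /* undo the insert on error */
        return err;
}

int main(void)
{
        struct model_cq cq = { .cqn = 3 };

        printf("alloc (setup ok): %d\n", model_cq_alloc(&cq, false));
        tree_delete(&cq);                        /* normal free-path removal */
        printf("alloc (setup fails): %d\n", model_cq_alloc(&cq, true));
        return 0;
}
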
cq_table          464 drivers/net/ethernet/mellanox/mlx4/cq.c 	struct mlx4_cq_table *cq_table = &mlx4_priv(dev)->cq_table;
cq_table          467 drivers/net/ethernet/mellanox/mlx4/cq.c 	spin_lock_init(&cq_table->lock);
cq_table          468 drivers/net/ethernet/mellanox/mlx4/cq.c 	INIT_RADIX_TREE(&cq_table->tree, GFP_ATOMIC);
cq_table          472 drivers/net/ethernet/mellanox/mlx4/cq.c 	err = mlx4_bitmap_init(&cq_table->bitmap, dev->caps.num_cqs,
cq_table          485 drivers/net/ethernet/mellanox/mlx4/cq.c 	mlx4_bitmap_cleanup(&mlx4_priv(dev)->cq_table.bitmap);
cq_table         1608 drivers/net/ethernet/mellanox/mlx4/main.c 	err = mlx4_init_icm_table(dev, &priv->cq_table.cmpt_table,
cq_table         1629 drivers/net/ethernet/mellanox/mlx4/main.c 	mlx4_cleanup_icm_table(dev, &priv->cq_table.cmpt_table);
cq_table         1763 drivers/net/ethernet/mellanox/mlx4/main.c 	err = mlx4_init_icm_table(dev, &priv->cq_table.table,
cq_table         1807 drivers/net/ethernet/mellanox/mlx4/main.c 	mlx4_cleanup_icm_table(dev, &priv->cq_table.table);
cq_table         1832 drivers/net/ethernet/mellanox/mlx4/main.c 	mlx4_cleanup_icm_table(dev, &priv->cq_table.cmpt_table);
cq_table         1851 drivers/net/ethernet/mellanox/mlx4/main.c 	mlx4_cleanup_icm_table(dev, &priv->cq_table.table);
cq_table         1860 drivers/net/ethernet/mellanox/mlx4/main.c 	mlx4_cleanup_icm_table(dev, &priv->cq_table.cmpt_table);
cq_table          895 drivers/net/ethernet/mellanox/mlx4/mlx4.h 	struct mlx4_cq_table	cq_table;
cq_table          116 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	struct mlx5_cq_table *table = &eq->cq_table;
cq_table          246 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	struct mlx5_cq_table *cq_table = &eq->cq_table;
cq_table          258 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	memset(cq_table, 0, sizeof(*cq_table));
cq_table          259 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	spin_lock_init(&cq_table->lock);
cq_table          260 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	INIT_RADIX_TREE(&cq_table->tree, GFP_ATOMIC);
cq_table          385 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	struct mlx5_cq_table *table = &eq->cq_table;
cq_table          397 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	struct mlx5_cq_table *table = &eq->cq_table;
cq_table           26 drivers/net/ethernet/mellanox/mlx5/core/lib/eq.h 	struct mlx5_cq_table    cq_table;
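
The mlx5 hits differ from the three drivers above in where the table lives: cq_table is embedded in each event queue (lib/eq.h:26, referenced through eq->cq_table in eq.c), and eq.c:258-260 shows it being zeroed and given its own spinlock and radix tree when the EQ is set up, so every EQ resolves CQNs in its own tree rather than in one per-device structure. A compact model of that per-EQ layout, with all names invented:

/* Per-EQ lookup table model: each event queue owns its own lock and tree,
 * mirroring the eq->cq_table hits above.  Illustrative only, not mlx5 code. */
#include <pthread.h>
#include <stdio.h>
#include <string.h>

#define CQS_PER_EQ 64

struct model_cq { unsigned int cqn; };

/* per-EQ table: each EQ resolves CQNs in its own tree under its own lock */
struct model_cq_table {
        pthread_mutex_t lock;                 /* cq_table->lock */
        struct model_cq *tree[CQS_PER_EQ];    /* cq_table->tree */
};

struct model_eq {
        unsigned int eqn;
        struct model_cq_table cq_table;       /* embedded, as in struct mlx5_eq */
};

static void model_eq_create(struct model_eq *eq, unsigned int eqn)
{
        /* mirrors eq.c:258-260: zero the table, init the lock and the tree */
        memset(&eq->cq_table, 0, sizeof(eq->cq_table));
        pthread_mutex_init(&eq->cq_table.lock, NULL);
        eq->eqn = eqn;
}

static void model_eq_add_cq(struct model_eq *eq, struct model_cq *cq)
{
        pthread_mutex_lock(&eq->cq_table.lock);
        eq->cq_table.tree[cq->cqn % CQS_PER_EQ] = cq;
        pthread_mutex_unlock(&eq->cq_table.lock);
}

static struct model_cq *model_eq_get_cq(struct model_eq *eq, unsigned int cqn)
{
        struct model_cq *cq;

        pthread_mutex_lock(&eq->cq_table.lock);
        cq = eq->cq_table.tree[cqn % CQS_PER_EQ];
        pthread_mutex_unlock(&eq->cq_table.lock);
        return cq;
}

int main(void)
{
        struct model_eq eq0, eq1;
        struct model_cq cq = { .cqn = 9 };

        model_eq_create(&eq0, 0);
        model_eq_create(&eq1, 1);

        /* a CQ registered on eq0 is only visible through eq0's table */
        model_eq_add_cq(&eq0, &cq);
        printf("eq0 lookup: %p, eq1 lookup: %p\n",
               (void *)model_eq_get_cq(&eq0, 9), (void *)model_eq_get_cq(&eq1, 9));
        return 0;
}
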