dct              1293 arch/ia64/include/asm/pal.h 				dct	:4,	/* Data cache tracking */
dct                90 drivers/edac/amd64_edac.c static void f15h_select_dct(struct amd64_pvt *pvt, u8 dct)
dct                96 drivers/edac/amd64_edac.c 	reg |= dct;
dct               114 drivers/edac/amd64_edac.c static inline int amd64_read_dct_pci_cfg(struct amd64_pvt *pvt, u8 dct,
dct               119 drivers/edac/amd64_edac.c 		if (dct || offset >= 0x100)
dct               124 drivers/edac/amd64_edac.c 		if (dct) {
dct               142 drivers/edac/amd64_edac.c 		dct = (dct && pvt->model == 0x30) ? 3 : dct;
dct               143 drivers/edac/amd64_edac.c 		f15h_select_dct(pvt, dct);
dct               147 drivers/edac/amd64_edac.c 		if (dct)
dct               391 drivers/edac/amd64_edac.c static void get_cs_base_and_mask(struct amd64_pvt *pvt, int csrow, u8 dct,
dct               398 drivers/edac/amd64_edac.c 		csbase		= pvt->csels[dct].csbases[csrow];
dct               399 drivers/edac/amd64_edac.c 		csmask		= pvt->csels[dct].csmasks[csrow];
dct               410 drivers/edac/amd64_edac.c 		csbase          = pvt->csels[dct].csbases[csrow];
dct               411 drivers/edac/amd64_edac.c 		csmask          = pvt->csels[dct].csmasks[csrow >> 1];
dct               426 drivers/edac/amd64_edac.c 		csbase		= pvt->csels[dct].csbases[csrow];
dct               427 drivers/edac/amd64_edac.c 		csmask		= pvt->csels[dct].csmasks[csrow >> 1];
dct               447 drivers/edac/amd64_edac.c #define for_each_chip_select(i, dct, pvt) \
dct               448 drivers/edac/amd64_edac.c 	for (i = 0; i < pvt->csels[dct].b_cnt; i++)
dct               450 drivers/edac/amd64_edac.c #define chip_select_base(i, dct, pvt) \
dct               451 drivers/edac/amd64_edac.c 	pvt->csels[dct].csbases[i]
dct               453 drivers/edac/amd64_edac.c #define for_each_chip_select_mask(i, dct, pvt) \
dct               454 drivers/edac/amd64_edac.c 	for (i = 0; i < pvt->csels[dct].m_cnt; i++)
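
In amd64_edac, dct is a DRAM controller index, and the three macros above are the driver's uniform way to walk the chip selects hanging off one controller. A minimal sketch of how they combine, built only from the macros and the pvt->csels[] layout they imply (dump_chip_selects() is a hypothetical helper, not part of the driver):

	/* Hypothetical helper: walk every chip select on DRAM
	 * controller 'dct' and log its base register, using the
	 * for_each_chip_select()/chip_select_base() macros above. */
	static void dump_chip_selects(struct amd64_pvt *pvt, u8 dct)
	{
		int cs;

		for_each_chip_select(cs, dct, pvt)
			edac_dbg(1, "DCT%d: csrow %d base 0x%x\n",
				 dct, cs, chip_select_base(cs, dct, pvt));
	}
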
dct              1346 drivers/edac/amd64_edac.c static int k8_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
dct              1349 drivers/edac/amd64_edac.c 	u32 dclr = dct ? pvt->dclr1 : pvt->dclr0;
dct              1513 drivers/edac/amd64_edac.c static int f10_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
dct              1516 drivers/edac/amd64_edac.c 	u32 dclr = dct ? pvt->dclr1 : pvt->dclr0;
dct              1529 drivers/edac/amd64_edac.c static int f15_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
dct              1538 drivers/edac/amd64_edac.c static int f15_m60h_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
dct              1542 drivers/edac/amd64_edac.c 	u32 dcsm = pvt->csels[dct].csmasks[cs_mask_nr];
dct              1571 drivers/edac/amd64_edac.c static int f16_dbam_to_chip_select(struct amd64_pvt *pvt, u8 dct,
dct              1800 drivers/edac/amd64_edac.c static int f10_process_possible_spare(struct amd64_pvt *pvt, u8 dct, int csrow)
dct              1804 drivers/edac/amd64_edac.c 	if (online_spare_swap_done(pvt, dct) &&
dct              1805 drivers/edac/amd64_edac.c 	    csrow == online_spare_bad_dramcs(pvt, dct)) {
dct              1807 drivers/edac/amd64_edac.c 		for_each_chip_select(tmp_cs, dct, pvt) {
dct              1808 drivers/edac/amd64_edac.c 			if (chip_select_base(tmp_cs, dct, pvt) & 0x2) {
dct              1825 drivers/edac/amd64_edac.c static int f1x_lookup_addr_in_dct(u64 in_addr, u8 nid, u8 dct)
dct              1839 drivers/edac/amd64_edac.c 	edac_dbg(1, "input addr: 0x%llx, DCT: %d\n", in_addr, dct);
dct              1841 drivers/edac/amd64_edac.c 	for_each_chip_select(csrow, dct, pvt) {
dct              1842 drivers/edac/amd64_edac.c 		if (!csrow_enabled(csrow, dct, pvt))
dct              1845 drivers/edac/amd64_edac.c 		get_cs_base_and_mask(pvt, csrow, dct, &cs_base, &cs_mask);
dct              1860 drivers/edac/amd64_edac.c 			cs_found = f10_process_possible_spare(pvt, dct, csrow);
dct              2879 drivers/edac/amd64_edac.c static u32 get_csrow_nr_pages(struct amd64_pvt *pvt, u8 dct, int csrow_nr_orig)
dct              2881 drivers/edac/amd64_edac.c 	u32 dbam = dct ? pvt->dbam1 : pvt->dbam0;
dct              2889 drivers/edac/amd64_edac.c 		cs_mode = f17_get_cs_mode(csrow_nr >> 1, dct, pvt);
dct              2892 drivers/edac/amd64_edac.c 	nr_pages   = pvt->ops->dbam_to_cs(pvt, dct, cs_mode, csrow_nr);
dct              2896 drivers/edac/amd64_edac.c 		    csrow_nr_orig, dct,  cs_mode);
dct               174 drivers/edac/amd64_edac.h #define csrow_enabled(i, dct, pvt)	((pvt)->csels[(dct)].csbases[(i)]     & DCSB_CS_ENABLE)
dct               175 drivers/edac/amd64_edac.h #define csrow_sec_enabled(i, dct, pvt)	((pvt)->csels[(dct)].csbases_sec[(i)] & DCSB_CS_ENABLE)
dct               475 drivers/edac/amd64_edac.h 	int (*dbam_to_cs)		(struct amd64_pvt *pvt, u8 dct,
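
The ->dbam_to_cs() op declared here is what get_csrow_nr_pages() above dispatches through, with one implementation per CPU family (k8/f10/f15/f16). A sketch of summing a node's memory through that path, assuming two DCTs per node as in the families above (total_node_pages() is a hypothetical helper):

	/* Hypothetical helper: pages behind every enabled chip select,
	 * summed over both DCTs of a node, via the macros and
	 * get_csrow_nr_pages() listed above. */
	static u64 total_node_pages(struct amd64_pvt *pvt)
	{
		u64 pages = 0;
		u8 dct;
		int cs;

		for (dct = 0; dct < 2; dct++)
			for_each_chip_select(cs, dct, pvt)
				if (csrow_enabled(cs, dct, pvt))
					pages += get_csrow_nr_pages(pvt, dct, cs);

		return pages;
	}
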
dct               630 drivers/infiniband/hw/mlx5/devx.c 					      qp->dct.mdct.mqp.qpn) == obj_id;
dct              2290 drivers/infiniband/hw/mlx5/devx.c 		obj_id = be32_to_cpu(eqe->data.dct.dctn) & 0xffffff;
dct               406 drivers/infiniband/hw/mlx5/mlx5_ib.h 		struct mlx5_ib_dct dct;
dct              2563 drivers/infiniband/hw/mlx5/qp.c 	qp->dct.in = kzalloc(MLX5_ST_SZ_BYTES(create_dct_in), GFP_KERNEL);
dct              2564 drivers/infiniband/hw/mlx5/qp.c 	if (!qp->dct.in) {
dct              2569 drivers/infiniband/hw/mlx5/qp.c 	MLX5_SET(create_dct_in, qp->dct.in, uid, to_mpd(pd)->uid);
dct              2570 drivers/infiniband/hw/mlx5/qp.c 	dctc = MLX5_ADDR_OF(create_dct_in, qp->dct.in, dct_context_entry);
dct              2619 drivers/infiniband/hw/mlx5/qp.c 	if (!MLX5_CAP_GEN(dev->mdev, dct)) {
dct              2758 drivers/infiniband/hw/mlx5/qp.c 		err = mlx5_core_destroy_dct(dev->mdev, &mqp->dct.mdct);
dct              2765 drivers/infiniband/hw/mlx5/qp.c 	kfree(mqp->dct.in);
dct              3813 drivers/infiniband/hw/mlx5/qp.c 	dctc = MLX5_ADDR_OF(create_dct_in, qp->dct.in, dct_context_entry);
dct              3867 drivers/infiniband/hw/mlx5/qp.c 		err = mlx5_core_create_dct(dev->mdev, &qp->dct.mdct, qp->dct.in,
dct              3872 drivers/infiniband/hw/mlx5/qp.c 		resp.dctn = qp->dct.mdct.mqp.qpn;
dct              3875 drivers/infiniband/hw/mlx5/qp.c 			mlx5_core_destroy_dct(dev->mdev, &qp->dct.mdct);
dct              5696 drivers/infiniband/hw/mlx5/qp.c 	struct mlx5_core_dct	*dct = &mqp->dct.mdct;
dct              5719 drivers/infiniband/hw/mlx5/qp.c 	err = mlx5_core_dct_query(dev->mdev, dct, out, outlen);
dct               544 drivers/net/ethernet/mellanox/mlx5/core/eq.c 	if (MLX5_CAP_GEN_MAX(dev, dct))
dct               571 drivers/net/ethernet/mellanox/mlx5/core/main.c 	if (MLX5_CAP_GEN_MAX(dev, dct))
dct               572 drivers/net/ethernet/mellanox/mlx5/core/main.c 		MLX5_SET(cmd_hca_cap, set_hca_cap, dct, 1);
dct                44 drivers/net/ethernet/mellanox/mlx5/core/qp.c 			       struct mlx5_core_dct *dct);
dct               128 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	struct mlx5_core_dct *dct;
dct               138 drivers/net/ethernet/mellanox/mlx5/core/qp.c 		rsn = be32_to_cpu(eqe->data.dct.dctn) & 0xffffff;
dct               183 drivers/net/ethernet/mellanox/mlx5/core/qp.c 		dct = (struct mlx5_core_dct *)common;
dct               185 drivers/net/ethernet/mellanox/mlx5/core/qp.c 			complete(&dct->drained);
dct               234 drivers/net/ethernet/mellanox/mlx5/core/qp.c 				  struct mlx5_core_dct *dct, bool need_cleanup)
dct               238 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	struct mlx5_core_qp *qp = &dct->mqp;
dct               241 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	err = mlx5_core_drain_dct(dev, dct);
dct               252 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	wait_for_completion(&dct->drained);
dct               255 drivers/net/ethernet/mellanox/mlx5/core/qp.c 		destroy_resource_common(dev, &dct->mqp);
dct               265 drivers/net/ethernet/mellanox/mlx5/core/qp.c 			 struct mlx5_core_dct *dct,
dct               269 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	struct mlx5_core_qp *qp = &dct->mqp;
dct               272 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	init_completion(&dct->drained);
dct               289 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	_mlx5_core_destroy_dct(dev, dct, false);
dct               338 drivers/net/ethernet/mellanox/mlx5/core/qp.c 			       struct mlx5_core_dct *dct)
dct               342 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	struct mlx5_core_qp *qp = &dct->mqp;
dct               352 drivers/net/ethernet/mellanox/mlx5/core/qp.c 			  struct mlx5_core_dct *dct)
dct               354 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	return _mlx5_core_destroy_dct(dev, dct, true);
dct               556 drivers/net/ethernet/mellanox/mlx5/core/qp.c int mlx5_core_dct_query(struct mlx5_core_dev *dev, struct mlx5_core_dct *dct,
dct               560 drivers/net/ethernet/mellanox/mlx5/core/qp.c 	struct mlx5_core_qp *qp = &dct->mqp;
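
Across mlx5, dct is the InfiniBand Dynamically Connected Transport object, not a DRAM controller. The create/drain/destroy/query calls above compose into a lifecycle roughly like the sketch below; this is a hedged sketch, assuming the six-argument mlx5_core_create_dct() signature of this driver era, and it fills only two dct_context_entry fields (real callers also set access keys, SRQ numbers, and more):

	#include <linux/slab.h>
	#include <linux/mlx5/driver.h>
	#include <linux/mlx5/qp.h>

	/* Hypothetical demo: create a DCT, then tear it down.
	 * mlx5_core_destroy_dct() drains the DCT (waiting on
	 * dct->drained) before destroying it, per the qp.c code above. */
	static int dct_lifecycle_demo(struct mlx5_core_dev *mdev, u32 pdn, u32 cqn)
	{
		u32 out[MLX5_ST_SZ_DW(create_dct_out)] = {};
		struct mlx5_core_dct dct = {};
		void *dctc;
		u32 *in;
		int err;

		if (!MLX5_CAP_GEN(mdev, dct))	/* HCA must advertise DCT */
			return -EOPNOTSUPP;

		in = kzalloc(MLX5_ST_SZ_BYTES(create_dct_in), GFP_KERNEL);
		if (!in)
			return -ENOMEM;

		dctc = MLX5_ADDR_OF(create_dct_in, in, dct_context_entry);
		MLX5_SET(dctc, dctc, pd, pdn);
		MLX5_SET(dctc, dctc, cqn, cqn);
		/* ... remaining dct_context_entry fields elided ... */

		err = mlx5_core_create_dct(mdev, &dct, in,
					   MLX5_ST_SZ_BYTES(create_dct_in),
					   out, sizeof(out));
		if (!err)
			mlx5_core_destroy_dct(mdev, &dct);

		kfree(in);
		return err;
	}
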
dct               626 drivers/soc/fsl/dpio/qbman-portal.c 			    enum qbman_pull_type_e dct)
dct               628 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= dct << QB_VDQCR_VERB_DCT_SHIFT;
dct               640 drivers/soc/fsl/dpio/qbman-portal.c 				 enum qbman_pull_type_e dct)
dct               642 drivers/soc/fsl/dpio/qbman-portal.c 	d->verb |= dct << QB_VDQCR_VERB_DCT_SHIFT;
dct               157 drivers/soc/fsl/dpio/qbman-portal.h 			    enum qbman_pull_type_e dct);
dct               159 drivers/soc/fsl/dpio/qbman-portal.h 				 enum qbman_pull_type_e dct);
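
In the DPAA2 QBMan portal code, dct is the dequeue command type (enum qbman_pull_type_e) shifted into the VDQCR verb by the two pull-descriptor setters above. A sketch of a volatile dequeue using it, assuming the usual qbman_pull_desc_* helpers from qbman-portal.h (pull_from_channel() and its arguments are hypothetical):

	/* Hypothetical helper: pull up to 16 frames from a channel,
	 * with priority-precedence dequeue as the dct value. */
	static int pull_from_channel(struct qbman_swp *swp, u32 chid,
				     struct dpaa2_dq *storage,
				     dma_addr_t storage_phys)
	{
		struct qbman_pull_desc d;

		qbman_pull_desc_clear(&d);
		qbman_pull_desc_set_storage(&d, storage, storage_phys, 1);
		qbman_pull_desc_set_numframes(&d, 16);
		qbman_pull_desc_set_channel(&d, chid, qbman_pull_type_prio);

		return qbman_swp_pull(swp, &d);
	}
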
dct               707 include/linux/mlx5/device.h 	struct mlx5_eqe_dct             dct;
dct              1287 include/linux/mlx5/mlx5_ifc.h 	u8         dct[0x1];
dct               575 include/linux/mlx5/qp.h 			  struct mlx5_core_dct *dct);
dct               578 include/linux/mlx5/qp.h int mlx5_core_dct_query(struct mlx5_core_dev *dev, struct mlx5_core_dct *dct,