tcm                19 arch/arm/include/asm/tcm.h #define __tcmdata __section(.tcm.data)
tcm                21 arch/arm/include/asm/tcm.h #define __tcmconst __section(.tcm.rodata)
tcm                23 arch/arm/include/asm/tcm.h #define __tcmfunc __attribute__((long_call)) __section(.tcm.text) noinline
tcm                25 arch/arm/include/asm/tcm.h #define __tcmlocalfunc __section(.tcm.text)
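The four annotations above from arch/arm/include/asm/tcm.h place symbols into the .tcm.* sections that the vmlinux.lds.h rules below collect. A minimal sketch of the assumed usage; the variable and function names are hypothetical:

#include <asm/tcm.h>

static int hits __tcmdata;                  /* ends up in .tcm.data */
static const int hits_max __tcmconst = 16;  /* ends up in .tcm.rodata */

/* __tcmfunc adds long_call + noinline because TCM sits far from normal .text */
int __tcmfunc tcm_bump(int n)
{
	if (hits < hits_max)
		hits += n;
	return hits;
}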
tcm               123 arch/arm/kernel/vmlinux.lds.h 		*(.tcm.text)						\
tcm               124 arch/arm/kernel/vmlinux.lds.h 		*(.tcm.rodata)						\
tcm               133 arch/arm/kernel/vmlinux.lds.h 		*(.tcm.data)						\
tcm               130 drivers/gpu/drm/omapdrm/omap_dmm_priv.h 	struct tcm *tcm;
tcm               141 drivers/gpu/drm/omapdrm/omap_dmm_priv.h 	struct tcm *tcm;
tcm               187 drivers/gpu/drm/omapdrm/omap_dmm_priv.h 	struct tcm **tcm;
tcm                42 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c static struct tcm *containers[TILFMT_NFORMATS];
tcm               314 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c static struct dmm_txn *dmm_txn_init(struct dmm *dmm, struct tcm *tcm)
tcm               340 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	engine->tcm = tcm;
tcm               372 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	pat->area.y0 += engine->tcm->y_offset;
tcm               373 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	pat->area.y1 += engine->tcm->y_offset;
tcm               377 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 			.lut_id = engine->tcm->lut_id,
tcm               490 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	txn = dmm_txn_init(omap_dmm, area->tcm);
tcm               609 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	if (block->area.tcm)
tcm               756 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 			if (omap_dmm->tcm && omap_dmm->tcm[i])
tcm               757 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 				omap_dmm->tcm[i]->deinit(omap_dmm->tcm[i]);
tcm               758 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		kfree(omap_dmm->tcm);
tcm               917 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	omap_dmm->tcm = kcalloc(omap_dmm->num_lut, sizeof(*omap_dmm->tcm),
tcm               919 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	if (!omap_dmm->tcm) {
tcm               929 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		omap_dmm->tcm[i] = sita_init(omap_dmm->container_width,
tcm               932 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		if (!omap_dmm->tcm[i]) {
tcm               938 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		omap_dmm->tcm[i]->lut_id = i;
tcm               944 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	containers[TILFMT_8BIT] = omap_dmm->tcm[0];
tcm               945 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	containers[TILFMT_16BIT] = omap_dmm->tcm[0];
tcm               946 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 	containers[TILFMT_32BIT] = omap_dmm->tcm[0];
tcm               952 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		containers[TILFMT_PAGE] = omap_dmm->tcm[1];
tcm               953 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		omap_dmm->tcm[1]->y_offset = OMAP5_LUT_OFFSET;
tcm               954 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		omap_dmm->tcm[1]->lut_id = 0;
tcm               956 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		containers[TILFMT_PAGE] = omap_dmm->tcm[0];
tcm               960 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		.tcm = NULL,
tcm               985 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		area.tcm = omap_dmm->tcm[i];
tcm              1118 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 			if (block->area.tcm == omap_dmm->tcm[lut_idx]) {
tcm              1184 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		.tcm = NULL,
tcm              1191 drivers/gpu/drm/omapdrm/omap_dmm_tiler.c 		area.tcm = omap_dmm->tcm[i];
tcm               162 drivers/gpu/drm/omapdrm/tcm-sita.c static s32 sita_reserve_1d(struct tcm *tcm, u32 num_slots,
tcm               168 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_lock(&(tcm->lock));
tcm               169 drivers/gpu/drm/omapdrm/tcm-sita.c 	ret = r2l_b2t_1d(num_slots, &pos, tcm->bitmap, tcm->map_size);
tcm               171 drivers/gpu/drm/omapdrm/tcm-sita.c 		area->p0.x = pos % tcm->width;
tcm               172 drivers/gpu/drm/omapdrm/tcm-sita.c 		area->p0.y = pos / tcm->width;
tcm               173 drivers/gpu/drm/omapdrm/tcm-sita.c 		area->p1.x = (pos + num_slots - 1) % tcm->width;
tcm               174 drivers/gpu/drm/omapdrm/tcm-sita.c 		area->p1.y = (pos + num_slots - 1) / tcm->width;
tcm               176 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_unlock(&(tcm->lock));
tcm               181 drivers/gpu/drm/omapdrm/tcm-sita.c static s32 sita_reserve_2d(struct tcm *tcm, u16 h, u16 w, u16 align,
tcm               188 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_lock(&(tcm->lock));
tcm               189 drivers/gpu/drm/omapdrm/tcm-sita.c 	ret = l2r_t2b(w, h, align, offset, &pos, slot_bytes, tcm->bitmap,
tcm               190 drivers/gpu/drm/omapdrm/tcm-sita.c 			tcm->map_size, tcm->width);
tcm               193 drivers/gpu/drm/omapdrm/tcm-sita.c 		area->p0.x = pos % tcm->width;
tcm               194 drivers/gpu/drm/omapdrm/tcm-sita.c 		area->p0.y = pos / tcm->width;
tcm               198 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_unlock(&(tcm->lock));
tcm               203 drivers/gpu/drm/omapdrm/tcm-sita.c static void sita_deinit(struct tcm *tcm)
tcm               205 drivers/gpu/drm/omapdrm/tcm-sita.c 	kfree(tcm);
tcm               208 drivers/gpu/drm/omapdrm/tcm-sita.c static s32 sita_free(struct tcm *tcm, struct tcm_area *area)
tcm               213 drivers/gpu/drm/omapdrm/tcm-sita.c 	pos = area->p0.x + area->p0.y * tcm->width;
tcm               218 drivers/gpu/drm/omapdrm/tcm-sita.c 		w = area->p1.x + area->p1.y * tcm->width - pos + 1;
tcm               222 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_lock(&(tcm->lock));
tcm               223 drivers/gpu/drm/omapdrm/tcm-sita.c 	free_slots(pos, w, h, tcm->bitmap, tcm->width);
tcm               224 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_unlock(&(tcm->lock));
tcm               228 drivers/gpu/drm/omapdrm/tcm-sita.c struct tcm *sita_init(u16 width, u16 height)
tcm               230 drivers/gpu/drm/omapdrm/tcm-sita.c 	struct tcm *tcm;
tcm               236 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm = kzalloc(sizeof(*tcm) + map_size, GFP_KERNEL);
tcm               237 drivers/gpu/drm/omapdrm/tcm-sita.c 	if (!tcm)
tcm               241 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->height = height;
tcm               242 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->width = width;
tcm               243 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->reserve_2d = sita_reserve_2d;
tcm               244 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->reserve_1d = sita_reserve_1d;
tcm               245 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->free = sita_free;
tcm               246 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->deinit = sita_deinit;
tcm               248 drivers/gpu/drm/omapdrm/tcm-sita.c 	spin_lock_init(&tcm->lock);
tcm               249 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->bitmap = (unsigned long *)(tcm + 1);
tcm               250 drivers/gpu/drm/omapdrm/tcm-sita.c 	bitmap_clear(tcm->bitmap, 0, width*height);
tcm               252 drivers/gpu/drm/omapdrm/tcm-sita.c 	tcm->map_size = width*height;
tcm               254 drivers/gpu/drm/omapdrm/tcm-sita.c 	return tcm;
tcm               257 drivers/gpu/drm/omapdrm/tcm-sita.c 	kfree(tcm);
tcm                40 drivers/gpu/drm/omapdrm/tcm.h struct tcm;
tcm                51 drivers/gpu/drm/omapdrm/tcm.h 	struct tcm    *tcm;	/* parent */
tcm                67 drivers/gpu/drm/omapdrm/tcm.h 	s32 (*reserve_2d)(struct tcm *tcm, u16 height, u16 width, u16 align,
tcm                70 drivers/gpu/drm/omapdrm/tcm.h 	s32 (*reserve_1d)(struct tcm *tcm, u32 slots, struct tcm_area *area);
tcm                71 drivers/gpu/drm/omapdrm/tcm.h 	s32 (*free)(struct tcm *tcm, struct tcm_area *area);
tcm                72 drivers/gpu/drm/omapdrm/tcm.h 	void (*deinit)(struct tcm *tcm);
tcm                91 drivers/gpu/drm/omapdrm/tcm.h struct tcm *sita_init(u16 width, u16 height);
tcm               104 drivers/gpu/drm/omapdrm/tcm.h static inline void tcm_deinit(struct tcm *tcm)
tcm               106 drivers/gpu/drm/omapdrm/tcm.h 	if (tcm)
tcm               107 drivers/gpu/drm/omapdrm/tcm.h 		tcm->deinit(tcm);
tcm               131 drivers/gpu/drm/omapdrm/tcm.h static inline s32 tcm_reserve_2d(struct tcm *tcm, u16 width, u16 height,
tcm               136 drivers/gpu/drm/omapdrm/tcm.h 	s32 res = tcm  == NULL ? -ENODEV :
tcm               140 drivers/gpu/drm/omapdrm/tcm.h 		(height > tcm->height || width > tcm->width) ? -ENOMEM : 0;
tcm               144 drivers/gpu/drm/omapdrm/tcm.h 		res = tcm->reserve_2d(tcm, height, width, align, offset,
tcm               146 drivers/gpu/drm/omapdrm/tcm.h 		area->tcm = res ? NULL : tcm;
tcm               165 drivers/gpu/drm/omapdrm/tcm.h static inline s32 tcm_reserve_1d(struct tcm *tcm, u32 slots,
tcm               169 drivers/gpu/drm/omapdrm/tcm.h 	s32 res = tcm  == NULL ? -ENODEV :
tcm               171 drivers/gpu/drm/omapdrm/tcm.h 		slots > (tcm->width * (u32) tcm->height) ? -ENOMEM : 0;
tcm               175 drivers/gpu/drm/omapdrm/tcm.h 		res = tcm->reserve_1d(tcm, slots, area);
tcm               176 drivers/gpu/drm/omapdrm/tcm.h 		area->tcm = res ? NULL : tcm;
tcm               199 drivers/gpu/drm/omapdrm/tcm.h 	if (area && area->tcm) {
tcm               200 drivers/gpu/drm/omapdrm/tcm.h 		res = area->tcm->free(area->tcm, area);
tcm               202 drivers/gpu/drm/omapdrm/tcm.h 			area->tcm = NULL;
tcm               227 drivers/gpu/drm/omapdrm/tcm.h 	if (slice->tcm && !slice->is2d &&
tcm               229 drivers/gpu/drm/omapdrm/tcm.h 		(slice->p0.x || (slice->p1.x != slice->tcm->width - 1))) {
tcm               231 drivers/gpu/drm/omapdrm/tcm.h 		slice->p1.x = slice->tcm->width - 1;
tcm               238 drivers/gpu/drm/omapdrm/tcm.h 		parent->tcm = NULL;
tcm               245 drivers/gpu/drm/omapdrm/tcm.h 	return area && area->tcm &&
tcm               247 drivers/gpu/drm/omapdrm/tcm.h 		area->p1.x < area->tcm->width &&
tcm               248 drivers/gpu/drm/omapdrm/tcm.h 		area->p1.y < area->tcm->height &&
tcm               252 drivers/gpu/drm/omapdrm/tcm.h 		  area->p0.x < area->tcm->width &&
tcm               253 drivers/gpu/drm/omapdrm/tcm.h 		  area->p0.x + area->p0.y * area->tcm->width <=
tcm               254 drivers/gpu/drm/omapdrm/tcm.h 		  area->p1.x + area->p1.y * area->tcm->width) ||
tcm               269 drivers/gpu/drm/omapdrm/tcm.h 		i = p->x + p->y * a->tcm->width;
tcm               270 drivers/gpu/drm/omapdrm/tcm.h 		return i >= a->p0.x + a->p0.y * a->tcm->width &&
tcm               271 drivers/gpu/drm/omapdrm/tcm.h 		       i <= a->p1.x + a->p1.y * a->tcm->width;
tcm               293 drivers/gpu/drm/omapdrm/tcm.h 							area->tcm->width;
tcm               308 drivers/gpu/drm/omapdrm/tcm.h 	a->p1.x = (a->p0.x + num_pg - 1) % a->tcm->width;
tcm               309 drivers/gpu/drm/omapdrm/tcm.h 	a->p1.y = a->p0.y + ((a->p0.x + num_pg - 1) / a->tcm->width);
tcm               328 drivers/gpu/drm/omapdrm/tcm.h 	     var.tcm; tcm_slice(&safe, &var))
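Taken together, the tcm-sita.c and tcm.h hits above form a small slot allocator: sita_init() returns a bitmap-backed container whose reserve/free ops are invoked through the inline wrappers. A minimal sketch of the assumed calling sequence (the dimensions, slot count, and caller are made up):

#include "tcm.h"

static int example_tcm_usage(void)
{
	struct tcm *container;
	struct tcm_area area = { };
	s32 ret;

	container = sita_init(256, 128);	/* 256x128 slots */
	if (!container)
		return -ENOMEM;

	/* grab 64 contiguous slots; on success area.tcm points back at container */
	ret = tcm_reserve_1d(container, 64, &area);
	if (!ret)
		tcm_free(&area);		/* returns the slots to the bitmap */

	tcm_deinit(container);			/* sita_deinit() -> kfree() */
	return ret;
}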
tcm                96 drivers/media/platform/mtk-vpu/mtk_vpu.c 	void __iomem *tcm;
tcm               443 drivers/media/platform/mtk-vpu/mtk_vpu.c 		return (__force void *)(dtcm_dmem_addr + vpu->reg.tcm +
tcm               512 drivers/media/platform/mtk-vpu/mtk_vpu.c 	dest = (__force void *)vpu->reg.tcm;
tcm               725 drivers/media/platform/mtk-vpu/mtk_vpu.c 	vpu->recv_buf = (__force struct share_obj *)(vpu->reg.tcm +
tcm               784 drivers/media/platform/mtk-vpu/mtk_vpu.c 	vpu->reg.tcm = devm_ioremap_resource(dev, res);
tcm               785 drivers/media/platform/mtk-vpu/mtk_vpu.c 	if (IS_ERR((__force void *)vpu->reg.tcm))
tcm               786 drivers/media/platform/mtk-vpu/mtk_vpu.c 		return PTR_ERR((__force void *)vpu->reg.tcm);
tcm               254 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *tcm;
tcm               365 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm               374 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm               384 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm               412 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm               422 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm               431 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *addr = devinfo->tcm + devinfo->ci->rambase + mem_offset;
tcm               441 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *addr = devinfo->tcm + devinfo->ci->rambase + mem_offset;
tcm               451 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm               492 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	void __iomem *address = devinfo->tcm + mem_offset;
tcm              1140 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	memcpy_fromio(&ringinfo, devinfo->tcm + devinfo->shared.ring_info_addr,
tcm              1210 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 		memcpy_toio(devinfo->tcm + devinfo->shared.ring_info_addr,
tcm              1649 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	devinfo->tcm = ioremap_nocache(bar1_addr, bar1_size);
tcm              1651 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	if (!devinfo->regs || !devinfo->tcm) {
tcm              1653 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 			  devinfo->tcm);
tcm              1659 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 		  devinfo->tcm, (unsigned long long)bar1_addr,
tcm              1668 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 	if (devinfo->tcm)
tcm              1669 drivers/net/wireless/broadcom/brcm80211/brcmfmac/pcie.c 		iounmap(devinfo->tcm);
tcm               413 drivers/net/wireless/intel/iwlwifi/mvm/coex.c 			data->primary_load = mvm->tcm.result.load[mvmvif->id];
tcm               415 drivers/net/wireless/intel/iwlwifi/mvm/coex.c 			data->secondary_load = mvm->tcm.result.load[mvmvif->id];
tcm               430 drivers/net/wireless/intel/iwlwifi/mvm/coex.c 		data->primary_load = mvm->tcm.result.load[mvmvif->id];
tcm               432 drivers/net/wireless/intel/iwlwifi/mvm/coex.c 		data->secondary_load = mvm->tcm.result.load[mvmvif->id];
tcm               267 drivers/net/wireless/intel/iwlwifi/mvm/debugfs-vif.c 			 mvm->tcm.result.load[mvmvif->id]);
tcm               981 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 			struct iwl_mvm_tcm_mac *mdata = &mvm->tcm.data[macid];
tcm              2909 drivers/net/wireless/intel/iwlwifi/mvm/mac80211.c 		mdata = &mvm->tcm.data[iwl_mvm_vif_from_mac80211(vif)->id];
tcm              1051 drivers/net/wireless/intel/iwlwifi/mvm/mvm.h 	struct iwl_mvm_tcm tcm;
tcm               717 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	spin_lock_init(&mvm->tcm.lock);
tcm               718 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	INIT_DELAYED_WORK(&mvm->tcm.work, iwl_mvm_tcm_work);
tcm               719 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	mvm->tcm.ts = jiffies;
tcm               720 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	mvm->tcm.ll_ts = jiffies;
tcm               721 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	mvm->tcm.uapsd_nonagg_ts = jiffies;
tcm               899 drivers/net/wireless/intel/iwlwifi/mvm/ops.c 	cancel_delayed_work_sync(&mvm->tcm.work);
tcm               279 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 	if (time_after(jiffies, mvm->tcm.ts + MVM_TCM_PERIOD))
tcm               280 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 		schedule_delayed_work(&mvm->tcm.work, 0);
tcm               281 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 	mdata = &mvm->tcm.data[mac];
tcm               473 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 		if (!mvm->tcm.paused && len >= sizeof(*hdr) &&
tcm               822 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 	spin_lock(&mvm->tcm.lock);
tcm               824 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 		struct iwl_mvm_tcm_mac *mdata = &mvm->tcm.data[i];
tcm               838 drivers/net/wireless/intel/iwlwifi/mvm/rx.c 	spin_unlock(&mvm->tcm.lock);
tcm              1741 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c 		if (!mvm->tcm.paused && len >= sizeof(*hdr) &&
tcm              1744 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c 		    time_after(jiffies, mvm->tcm.ts + MVM_TCM_PERIOD))
tcm              1745 drivers/net/wireless/intel/iwlwifi/mvm/rxmq.c 			schedule_delayed_work(&mvm->tcm.work, 0);
tcm               236 drivers/net/wireless/intel/iwlwifi/mvm/scan.c 	return mvm->tcm.result.global_load;
tcm               242 drivers/net/wireless/intel/iwlwifi/mvm/scan.c 	return mvm->tcm.result.band_load[band];
tcm              1034 drivers/net/wireless/intel/iwlwifi/mvm/tx.c 	mdata = &mvm->tcm.data[mac];
tcm              1036 drivers/net/wireless/intel/iwlwifi/mvm/tx.c 	if (mvm->tcm.paused)
tcm              1039 drivers/net/wireless/intel/iwlwifi/mvm/tx.c 	if (time_after(jiffies, mvm->tcm.ts + MVM_TCM_PERIOD))
tcm              1040 drivers/net/wireless/intel/iwlwifi/mvm/tx.c 		schedule_delayed_work(&mvm->tcm.work, 0);
tcm              1055 drivers/net/wireless/intel/iwlwifi/mvm/tx.c 	mdata = &mvm->tcm.data[mac];
tcm              1074 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	low_latency = mvm->tcm.result.low_latency[mvmvif->id];
tcm              1076 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (!mvm->tcm.result.change[mvmvif->id] &&
tcm              1123 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (mvm->tcm.data[mvmvif->id].opened_rx_ba_sessions)
tcm              1154 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (mvm->tcm.data[mvmvif->id].uapsd_nonagg_detect.detected)
tcm              1157 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.data[mvmvif->id].uapsd_nonagg_detect.detected = true;
tcm              1167 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	u64 bytes = mvm->tcm.data[mac].uapsd_nonagg_detect.rx_bytes;
tcm              1172 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	rate = ewma_rate_read(&mvm->tcm.data[mac].uapsd_nonagg_detect.rate);
tcm              1174 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (!rate || mvm->tcm.data[mac].opened_rx_ba_sessions ||
tcm              1175 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	    mvm->tcm.data[mac].uapsd_nonagg_detect.detected)
tcm              1222 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	unsigned int elapsed = jiffies_to_msecs(ts - mvm->tcm.ts);
tcm              1224 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		jiffies_to_msecs(ts - mvm->tcm.uapsd_nonagg_ts);
tcm              1231 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	bool handle_ll = time_after(ts, mvm->tcm.ll_ts + MVM_LL_PERIOD);
tcm              1234 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.ll_ts = ts;
tcm              1236 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.uapsd_nonagg_ts = ts;
tcm              1238 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.result.elapsed = elapsed;
tcm              1246 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		struct iwl_mvm_tcm_mac *mdata = &mvm->tcm.data[mac];
tcm              1254 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.result.change[mac] = load != mvm->tcm.result.load[mac];
tcm              1255 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.result.load[mac] = load;
tcm              1256 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.result.airtime[mac] = airtime;
tcm              1264 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 			mvm->tcm.result.low_latency[mac] = true;
tcm              1266 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 			mvm->tcm.result.low_latency[mac] = false;
tcm              1273 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		low_latency |= mvm->tcm.result.low_latency[mac];
tcm              1275 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		if (!mvm->tcm.result.low_latency[mac] && handle_uapsd)
tcm              1286 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.result.global_change = load != mvm->tcm.result.global_load;
tcm              1287 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.result.global_load = load;
tcm              1291 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.result.band_load[i] = band_load;
tcm              1327 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		time_after(ts, mvm->tcm.uapsd_nonagg_ts +
tcm              1330 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_lock(&mvm->tcm.lock);
tcm              1331 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (mvm->tcm.paused || !time_after(ts, mvm->tcm.ts + MVM_TCM_PERIOD)) {
tcm              1332 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		spin_unlock(&mvm->tcm.lock);
tcm              1335 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_unlock(&mvm->tcm.lock);
tcm              1344 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_lock(&mvm->tcm.lock);
tcm              1346 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (!mvm->tcm.paused && time_after(ts, mvm->tcm.ts + MVM_TCM_PERIOD)) {
tcm              1353 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		mvm->tcm.ts = ts;
tcm              1355 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 			schedule_delayed_work(&mvm->tcm.work, work_delay);
tcm              1357 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_unlock(&mvm->tcm.lock);
tcm              1366 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 					   tcm.work);
tcm              1373 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_lock_bh(&mvm->tcm.lock);
tcm              1374 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.paused = true;
tcm              1375 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_unlock_bh(&mvm->tcm.lock);
tcm              1377 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		cancel_delayed_work_sync(&mvm->tcm.work);
tcm              1385 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_lock_bh(&mvm->tcm.lock);
tcm              1386 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.ts = jiffies;
tcm              1387 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.ll_ts = jiffies;
tcm              1389 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		struct iwl_mvm_tcm_mac *mdata = &mvm->tcm.data[mac];
tcm              1396 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		if (mvm->tcm.result.low_latency[mac])
tcm              1401 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	mvm->tcm.paused = false;
tcm              1407 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	if (mvm->tcm.result.global_load > IWL_MVM_TRAFFIC_LOW)
tcm              1408 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		schedule_delayed_work(&mvm->tcm.work, MVM_TCM_PERIOD);
tcm              1410 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 		schedule_delayed_work(&mvm->tcm.work, MVM_LL_PERIOD);
tcm              1412 drivers/net/wireless/intel/iwlwifi/mvm/utils.c 	spin_unlock_bh(&mvm->tcm.lock);
tcm                62 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm                95 drivers/thunderbolt/tb.c 		list_add_tail(&tunnel->list, &tcm->tunnel_list);
tcm               143 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(port->sw->tb);
tcm               203 drivers/thunderbolt/tb.c 	if (!tcm->hotplug_active)
tcm               226 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               229 drivers/thunderbolt/tb.c 	list_for_each_entry(tunnel, &tcm->tunnel_list, list) {
tcm               248 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               252 drivers/thunderbolt/tb.c 	list_for_each_entry_safe(tunnel, n, &tcm->tunnel_list, list) {
tcm               363 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               390 drivers/thunderbolt/tb.c 	list_add_tail(&tunnel->list, &tcm->tunnel_list);
tcm               402 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               431 drivers/thunderbolt/tb.c 	list_add_tail(&tunnel->list, &tcm->tunnel_list);
tcm               437 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               463 drivers/thunderbolt/tb.c 	list_add_tail(&tunnel->list, &tcm->tunnel_list);
tcm               505 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               509 drivers/thunderbolt/tb.c 	if (!tcm->hotplug_active)
tcm               611 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               616 drivers/thunderbolt/tb.c 	list_for_each_entry_safe(tunnel, n, &tcm->tunnel_list, list) {
tcm               627 drivers/thunderbolt/tb.c 	tcm->hotplug_active = false; /* signal tb_handle_hotplug to quit */
tcm               653 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               689 drivers/thunderbolt/tb.c 	tcm->hotplug_active = true;
tcm               695 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               699 drivers/thunderbolt/tb.c 	tcm->hotplug_active = false; /* signal tb_handle_hotplug to quit */
tcm               707 drivers/thunderbolt/tb.c 	struct tb_cm *tcm = tb_priv(tb);
tcm               718 drivers/thunderbolt/tb.c 	list_for_each_entry_safe(tunnel, n, &tcm->tunnel_list, list)
tcm               720 drivers/thunderbolt/tb.c 	if (!list_empty(&tcm->tunnel_list)) {
tcm               729 drivers/thunderbolt/tb.c 	tcm->hotplug_active = true;
tcm               783 drivers/thunderbolt/tb.c 	struct tb_cm *tcm;
tcm               789 drivers/thunderbolt/tb.c 	tb = tb_domain_alloc(nhi, sizeof(*tcm));
tcm               796 drivers/thunderbolt/tb.c 	tcm = tb_priv(tb);
tcm               797 drivers/thunderbolt/tb.c 	INIT_LIST_HEAD(&tcm->tunnel_list);
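The tb.c hits above all follow one pattern: the domain is allocated with room for the connection manager's private struct tb_cm, and every callback recovers it with tb_priv(). A minimal sketch of that setup, assuming only the calls visible in the listing:

static struct tb *example_cm_alloc(struct tb_nhi *nhi)
{
	struct tb *tb;
	struct tb_cm *tcm;

	/* reserve sizeof(struct tb_cm) of private data behind struct tb */
	tb = tb_domain_alloc(nhi, sizeof(*tcm));
	if (!tb)
		return NULL;

	tcm = tb_priv(tb);			/* points at the private area */
	INIT_LIST_HEAD(&tcm->tunnel_list);
	tcm->hotplug_active = false;		/* set true once tunnels are up */

	return tb;
}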
tcm              2269 drivers/usb/gadget/function/f_tcm.c 	struct f_uas *tcm = to_f_uas(f);
tcm              2271 drivers/usb/gadget/function/f_tcm.c 	kfree(tcm);
tcm              2312 drivers/usb/gadget/function/f_tcm.c DECLARE_USB_FUNCTION(tcm, tcm_alloc_inst, tcm_alloc);
tcm              1787 net/sched/cls_api.c 	struct tcmsg *tcm;
tcm              1791 net/sched/cls_api.c 	nlh = nlmsg_put(skb, portid, seq, event, sizeof(*tcm), flags);
tcm              1794 net/sched/cls_api.c 	tcm = nlmsg_data(nlh);
tcm              1795 net/sched/cls_api.c 	tcm->tcm_family = AF_UNSPEC;
tcm              1796 net/sched/cls_api.c 	tcm->tcm__pad1 = 0;
tcm              1797 net/sched/cls_api.c 	tcm->tcm__pad2 = 0;
tcm              1799 net/sched/cls_api.c 		tcm->tcm_ifindex = qdisc_dev(q)->ifindex;
tcm              1800 net/sched/cls_api.c 		tcm->tcm_parent = parent;
tcm              1802 net/sched/cls_api.c 		tcm->tcm_ifindex = TCM_IFINDEX_MAGIC_BLOCK;
tcm              1803 net/sched/cls_api.c 		tcm->tcm_block_index = block->index;
tcm              1805 net/sched/cls_api.c 	tcm->tcm_info = TC_H_MAKE(tp->prio, tp->protocol);
tcm              1811 net/sched/cls_api.c 		tcm->tcm_handle = 0;
tcm              1814 net/sched/cls_api.c 		    tp->ops->dump(net, tp, fh, skb, tcm, rtnl_held) < 0)
tcm              2454 net/sched/cls_api.c 	struct tcmsg *tcm = nlmsg_data(cb->nlh);
tcm              2466 net/sched/cls_api.c 		if (TC_H_MAJ(tcm->tcm_info) &&
tcm              2467 net/sched/cls_api.c 		    TC_H_MAJ(tcm->tcm_info) != tp->prio)
tcm              2469 net/sched/cls_api.c 		if (TC_H_MIN(tcm->tcm_info) &&
tcm              2470 net/sched/cls_api.c 		    TC_H_MIN(tcm->tcm_info) != tp->protocol)
tcm              2516 net/sched/cls_api.c 	struct tcmsg *tcm = nlmsg_data(cb->nlh);
tcm              2522 net/sched/cls_api.c 	if (nlmsg_len(cb->nlh) < sizeof(*tcm))
tcm              2525 net/sched/cls_api.c 	err = nlmsg_parse_deprecated(cb->nlh, sizeof(*tcm), tca, TCA_MAX,
tcm              2530 net/sched/cls_api.c 	if (tcm->tcm_ifindex == TCM_IFINDEX_MAGIC_BLOCK) {
tcm              2531 net/sched/cls_api.c 		block = tcf_block_refcnt_get(net, tcm->tcm_block_index);
tcm              2546 net/sched/cls_api.c 		dev = __dev_get_by_index(net, tcm->tcm_ifindex);
tcm              2550 net/sched/cls_api.c 		parent = tcm->tcm_parent;
tcm              2554 net/sched/cls_api.c 			q = qdisc_lookup(dev, TC_H_MAJ(tcm->tcm_parent));
tcm              2562 net/sched/cls_api.c 		if (TC_H_MIN(tcm->tcm_parent)) {
tcm              2563 net/sched/cls_api.c 			cl = cops->find(q, tcm->tcm_parent);
tcm              2594 net/sched/cls_api.c 	if (tcm->tcm_ifindex == TCM_IFINDEX_MAGIC_BLOCK)
tcm              2614 net/sched/cls_api.c 	struct tcmsg *tcm;
tcm              2620 net/sched/cls_api.c 	nlh = nlmsg_put(skb, portid, seq, event, sizeof(*tcm), flags);
tcm              2623 net/sched/cls_api.c 	tcm = nlmsg_data(nlh);
tcm              2624 net/sched/cls_api.c 	tcm->tcm_family = AF_UNSPEC;
tcm              2625 net/sched/cls_api.c 	tcm->tcm__pad1 = 0;
tcm              2626 net/sched/cls_api.c 	tcm->tcm__pad2 = 0;
tcm              2627 net/sched/cls_api.c 	tcm->tcm_handle = 0;
tcm              2629 net/sched/cls_api.c 		tcm->tcm_ifindex = qdisc_dev(block->q)->ifindex;
tcm              2630 net/sched/cls_api.c 		tcm->tcm_parent = block->q->handle;
tcm              2632 net/sched/cls_api.c 		tcm->tcm_ifindex = TCM_IFINDEX_MAGIC_BLOCK;
tcm              2633 net/sched/cls_api.c 		tcm->tcm_block_index = block->index;
tcm              2900 net/sched/cls_api.c 	struct tcmsg *tcm = nlmsg_data(cb->nlh);
tcm              2907 net/sched/cls_api.c 	if (nlmsg_len(cb->nlh) < sizeof(*tcm))
tcm              2910 net/sched/cls_api.c 	err = nlmsg_parse_deprecated(cb->nlh, sizeof(*tcm), tca, TCA_MAX,
tcm              2915 net/sched/cls_api.c 	if (tcm->tcm_ifindex == TCM_IFINDEX_MAGIC_BLOCK) {
tcm              2916 net/sched/cls_api.c 		block = tcf_block_refcnt_get(net, tcm->tcm_block_index);
tcm              2931 net/sched/cls_api.c 		dev = __dev_get_by_index(net, tcm->tcm_ifindex);
tcm              2935 net/sched/cls_api.c 		parent = tcm->tcm_parent;
tcm              2940 net/sched/cls_api.c 			q = qdisc_lookup(dev, TC_H_MAJ(tcm->tcm_parent));
tcm              2949 net/sched/cls_api.c 		if (TC_H_MIN(tcm->tcm_parent)) {
tcm              2950 net/sched/cls_api.c 			cl = cops->find(q, tcm->tcm_parent);
tcm              2986 net/sched/cls_api.c 	if (tcm->tcm_ifindex == TCM_IFINDEX_MAGIC_BLOCK)
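The cls_api.c fill functions above share a fixed preamble: a netlink header sized for struct tcmsg, the AF_UNSPEC family, zeroed padding, then the ifindex/parent/handle addressing. A minimal sketch of that skeleton with hypothetical function name and arguments:

#include <net/netlink.h>
#include <linux/rtnetlink.h>

static int example_fill_tcmsg(struct sk_buff *skb, u32 portid, u32 seq,
			      int event, int ifindex, u32 parent, u32 handle)
{
	struct nlmsghdr *nlh;
	struct tcmsg *tcm;

	nlh = nlmsg_put(skb, portid, seq, event, sizeof(*tcm), 0);
	if (!nlh)
		return -EMSGSIZE;

	tcm = nlmsg_data(nlh);
	tcm->tcm_family = AF_UNSPEC;
	tcm->tcm__pad1 = 0;
	tcm->tcm__pad2 = 0;
	tcm->tcm_ifindex = ifindex;
	tcm->tcm_parent = parent;
	tcm->tcm_handle = handle;
	tcm->tcm_info = 0;

	nlmsg_end(skb, nlh);
	return 0;
}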
tcm               872 net/sched/sch_api.c 	struct tcmsg *tcm;
tcm               881 net/sched/sch_api.c 	nlh = nlmsg_put(skb, portid, seq, event, sizeof(*tcm), flags);
tcm               884 net/sched/sch_api.c 	tcm = nlmsg_data(nlh);
tcm               885 net/sched/sch_api.c 	tcm->tcm_family = AF_UNSPEC;
tcm               886 net/sched/sch_api.c 	tcm->tcm__pad1 = 0;
tcm               887 net/sched/sch_api.c 	tcm->tcm__pad2 = 0;
tcm               888 net/sched/sch_api.c 	tcm->tcm_ifindex = qdisc_dev(q)->ifindex;
tcm               889 net/sched/sch_api.c 	tcm->tcm_parent = clid;
tcm               890 net/sched/sch_api.c 	tcm->tcm_handle = q->handle;
tcm               891 net/sched/sch_api.c 	tcm->tcm_info = refcount_read(&q->refcnt);
tcm              1411 net/sched/sch_api.c 	struct tcmsg *tcm = nlmsg_data(n);
tcm              1423 net/sched/sch_api.c 	err = nlmsg_parse_deprecated(n, sizeof(*tcm), tca, TCA_MAX,
tcm              1428 net/sched/sch_api.c 	dev = __dev_get_by_index(net, tcm->tcm_ifindex);
tcm              1432 net/sched/sch_api.c 	clid = tcm->tcm_parent;
tcm              1453 net/sched/sch_api.c 		if (tcm->tcm_handle && q->handle != tcm->tcm_handle) {
tcm              1458 net/sched/sch_api.c 		q = qdisc_lookup(dev, tcm->tcm_handle);
tcm              1496 net/sched/sch_api.c 	struct tcmsg *tcm;
tcm              1508 net/sched/sch_api.c 	err = nlmsg_parse_deprecated(n, sizeof(*tcm), tca, TCA_MAX,
tcm              1513 net/sched/sch_api.c 	tcm = nlmsg_data(n);
tcm              1514 net/sched/sch_api.c 	clid = tcm->tcm_parent;
tcm              1517 net/sched/sch_api.c 	dev = __dev_get_by_index(net, tcm->tcm_ifindex);
tcm              1542 net/sched/sch_api.c 		if (!q || !tcm->tcm_handle || q->handle != tcm->tcm_handle) {
tcm              1543 net/sched/sch_api.c 			if (tcm->tcm_handle) {
tcm              1548 net/sched/sch_api.c 				if (TC_H_MIN(tcm->tcm_handle)) {
tcm              1552 net/sched/sch_api.c 				q = qdisc_lookup(dev, tcm->tcm_handle);
tcm              1603 net/sched/sch_api.c 		if (!tcm->tcm_handle) {
tcm              1607 net/sched/sch_api.c 		q = qdisc_lookup(dev, tcm->tcm_handle);
tcm              1636 net/sched/sch_api.c 					 tcm->tcm_parent, tcm->tcm_parent,
tcm              1646 net/sched/sch_api.c 			dev_queue = p->ops->cl_ops->select_queue(p, tcm);
tcm              1653 net/sched/sch_api.c 				 tcm->tcm_parent, tcm->tcm_handle,
tcm              1789 net/sched/sch_api.c 	struct tcmsg *tcm;
tcm              1796 net/sched/sch_api.c 	nlh = nlmsg_put(skb, portid, seq, event, sizeof(*tcm), flags);
tcm              1799 net/sched/sch_api.c 	tcm = nlmsg_data(nlh);
tcm              1800 net/sched/sch_api.c 	tcm->tcm_family = AF_UNSPEC;
tcm              1801 net/sched/sch_api.c 	tcm->tcm__pad1 = 0;
tcm              1802 net/sched/sch_api.c 	tcm->tcm__pad2 = 0;
tcm              1803 net/sched/sch_api.c 	tcm->tcm_ifindex = qdisc_dev(q)->ifindex;
tcm              1804 net/sched/sch_api.c 	tcm->tcm_parent = q->handle;
tcm              1805 net/sched/sch_api.c 	tcm->tcm_handle = q->handle;
tcm              1806 net/sched/sch_api.c 	tcm->tcm_info = 0;
tcm              1809 net/sched/sch_api.c 	if (cl_ops->dump && cl_ops->dump(q, cl, skb, tcm) < 0)
tcm              1979 net/sched/sch_api.c 	struct tcmsg *tcm = nlmsg_data(n);
tcm              1995 net/sched/sch_api.c 	err = nlmsg_parse_deprecated(n, sizeof(*tcm), tca, TCA_MAX,
tcm              2000 net/sched/sch_api.c 	dev = __dev_get_by_index(net, tcm->tcm_ifindex);
tcm              2019 net/sched/sch_api.c 	portid = tcm->tcm_parent;
tcm              2020 net/sched/sch_api.c 	clid = tcm->tcm_handle;
tcm              2129 net/sched/sch_api.c 				struct tcmsg *tcm, struct netlink_callback *cb,
tcm              2136 net/sched/sch_api.c 	    (tcm->tcm_parent &&
tcm              2137 net/sched/sch_api.c 	     TC_H_MAJ(tcm->tcm_parent) != q->handle)) {
tcm              2158 net/sched/sch_api.c 			       struct tcmsg *tcm, struct netlink_callback *cb,
tcm              2167 net/sched/sch_api.c 	if (tc_dump_tclass_qdisc(root, skb, tcm, cb, t_p, s_t) < 0)
tcm              2173 net/sched/sch_api.c 	if (tcm->tcm_parent) {
tcm              2174 net/sched/sch_api.c 		q = qdisc_match_from_root(root, TC_H_MAJ(tcm->tcm_parent));
tcm              2176 net/sched/sch_api.c 		    tc_dump_tclass_qdisc(q, skb, tcm, cb, t_p, s_t) < 0)
tcm              2181 net/sched/sch_api.c 		if (tc_dump_tclass_qdisc(q, skb, tcm, cb, t_p, s_t) < 0)
tcm              2190 net/sched/sch_api.c 	struct tcmsg *tcm = nlmsg_data(cb->nlh);
tcm              2196 net/sched/sch_api.c 	if (nlmsg_len(cb->nlh) < sizeof(*tcm))
tcm              2198 net/sched/sch_api.c 	dev = dev_get_by_index(net, tcm->tcm_ifindex);
tcm              2205 net/sched/sch_api.c 	if (tc_dump_tclass_root(dev->qdisc, skb, tcm, cb, &t, s_t) < 0)
tcm              2210 net/sched/sch_api.c 	    tc_dump_tclass_root(dev_queue->qdisc_sleeping, skb, tcm, cb,
tcm               601 net/sched/sch_atm.c 			     struct sk_buff *skb, struct tcmsg *tcm)
tcm               608 net/sched/sch_atm.c 		sch, p, flow, skb, tcm);
tcm               611 net/sched/sch_atm.c 	tcm->tcm_handle = flow->common.classid;
tcm               612 net/sched/sch_atm.c 	tcm->tcm_info = flow->q->handle;
tcm              2939 net/sched/sch_cake.c 			   struct sk_buff *skb, struct tcmsg *tcm)
tcm              2941 net/sched/sch_cake.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm              1348 net/sched/sch_cbq.c 	       struct sk_buff *skb, struct tcmsg *tcm)
tcm              1354 net/sched/sch_cbq.c 		tcm->tcm_parent = cl->tparent->common.classid;
tcm              1356 net/sched/sch_cbq.c 		tcm->tcm_parent = TC_H_ROOT;
tcm              1357 net/sched/sch_cbq.c 	tcm->tcm_handle = cl->common.classid;
tcm              1358 net/sched/sch_cbq.c 	tcm->tcm_info = cl->q->handle;
tcm               478 net/sched/sch_cbs.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               485 net/sched/sch_cbs.c 	tcm->tcm_handle |= TC_H_MIN(1);
tcm               486 net/sched/sch_cbs.c 	tcm->tcm_info = q->qdisc->handle;
tcm               236 net/sched/sch_drr.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               241 net/sched/sch_drr.c 	tcm->tcm_parent	= TC_H_ROOT;
tcm               242 net/sched/sch_drr.c 	tcm->tcm_handle	= cl->common.classid;
tcm               243 net/sched/sch_drr.c 	tcm->tcm_info	= cl->qdisc->handle;
tcm               427 net/sched/sch_dsmark.c 			     struct sk_buff *skb, struct tcmsg *tcm)
tcm               437 net/sched/sch_dsmark.c 	tcm->tcm_handle = TC_H_MAKE(TC_H_MAJ(sch->handle), cl - 1);
tcm               438 net/sched/sch_dsmark.c 	tcm->tcm_info = p->q->handle;
tcm               609 net/sched/sch_fq_codel.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               611 net/sched/sch_fq_codel.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm              1293 net/sched/sch_hfsc.c 		struct tcmsg *tcm)
tcm              1298 net/sched/sch_hfsc.c 	tcm->tcm_parent = cl->cl_parent ? cl->cl_parent->cl_common.classid :
tcm              1300 net/sched/sch_hfsc.c 	tcm->tcm_handle = cl->cl_common.classid;
tcm              1302 net/sched/sch_hfsc.c 		tcm->tcm_info = cl->qdisc->handle;
tcm              1075 net/sched/sch_htb.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm              1084 net/sched/sch_htb.c 	tcm->tcm_parent = cl->parent ? cl->parent->common.classid : TC_H_ROOT;
tcm              1085 net/sched/sch_htb.c 	tcm->tcm_handle = cl->common.classid;
tcm              1087 net/sched/sch_htb.c 		tcm->tcm_info = cl->leaf.q->handle;
tcm               185 net/sched/sch_mq.c 					    struct tcmsg *tcm)
tcm               187 net/sched/sch_mq.c 	return mq_queue_get(sch, TC_H_MIN(tcm->tcm_parent));
tcm               233 net/sched/sch_mq.c 			 struct sk_buff *skb, struct tcmsg *tcm)
tcm               237 net/sched/sch_mq.c 	tcm->tcm_parent = TC_H_ROOT;
tcm               238 net/sched/sch_mq.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm               239 net/sched/sch_mq.c 	tcm->tcm_info = dev_queue->qdisc_sleeping->handle;
tcm               489 net/sched/sch_mqprio.c 			 struct sk_buff *skb, struct tcmsg *tcm)
tcm               496 net/sched/sch_mqprio.c 		tcm->tcm_parent = (tc < 0) ? 0 :
tcm               499 net/sched/sch_mqprio.c 		tcm->tcm_info = dev_queue->qdisc_sleeping->handle;
tcm               501 net/sched/sch_mqprio.c 		tcm->tcm_parent = TC_H_ROOT;
tcm               502 net/sched/sch_mqprio.c 		tcm->tcm_info = 0;
tcm               504 net/sched/sch_mqprio.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm               604 net/sched/sch_mqprio.c 						struct tcmsg *tcm)
tcm               606 net/sched/sch_mqprio.c 	return mqprio_queue_get(sch, TC_H_MIN(tcm->tcm_parent));
tcm               325 net/sched/sch_multiq.c 			     struct sk_buff *skb, struct tcmsg *tcm)
tcm               329 net/sched/sch_multiq.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm               330 net/sched/sch_multiq.c 	tcm->tcm_info = q->queues[cl - 1]->handle;
tcm              1214 net/sched/sch_netem.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm              1221 net/sched/sch_netem.c 	tcm->tcm_handle |= TC_H_MIN(1);
tcm              1222 net/sched/sch_netem.c 	tcm->tcm_info = q->qdisc->handle;
tcm               348 net/sched/sch_prio.c 			   struct tcmsg *tcm)
tcm               352 net/sched/sch_prio.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm               353 net/sched/sch_prio.c 	tcm->tcm_info = q->queues[cl-1]->handle;
tcm               610 net/sched/sch_qfq.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               615 net/sched/sch_qfq.c 	tcm->tcm_parent	= TC_H_ROOT;
tcm               616 net/sched/sch_qfq.c 	tcm->tcm_handle	= cl->common.classid;
tcm               617 net/sched/sch_qfq.c 	tcm->tcm_info	= cl->qdisc->handle;
tcm               358 net/sched/sch_red.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               362 net/sched/sch_red.c 	tcm->tcm_handle |= TC_H_MIN(1);
tcm               363 net/sched/sch_red.c 	tcm->tcm_info = q->qdisc->handle;
tcm               612 net/sched/sch_sfb.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               856 net/sched/sch_sfq.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               858 net/sched/sch_sfq.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm               247 net/sched/sch_skbprio.c 			     struct sk_buff *skb, struct tcmsg *tcm)
tcm               249 net/sched/sch_skbprio.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm              1867 net/sched/sch_taprio.c 			     struct sk_buff *skb, struct tcmsg *tcm)
tcm              1871 net/sched/sch_taprio.c 	tcm->tcm_parent = TC_H_ROOT;
tcm              1872 net/sched/sch_taprio.c 	tcm->tcm_handle |= TC_H_MIN(cl);
tcm              1873 net/sched/sch_taprio.c 	tcm->tcm_info = dev_queue->qdisc_sleeping->handle;
tcm              1911 net/sched/sch_taprio.c 						struct tcmsg *tcm)
tcm              1913 net/sched/sch_taprio.c 	return taprio_queue_get(sch, TC_H_MIN(tcm->tcm_parent));
tcm               479 net/sched/sch_tbf.c 			  struct sk_buff *skb, struct tcmsg *tcm)
tcm               483 net/sched/sch_tbf.c 	tcm->tcm_handle |= TC_H_MIN(1);
tcm               484 net/sched/sch_tbf.c 	tcm->tcm_info = q->qdisc->handle;
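Most of the sch_*.c hits above share one dump_class shape: single-child qdiscs report their only class with minor id 1 and advertise the inner qdisc's handle via tcm_info (compare sch_red, sch_cbs, sch_tbf). A minimal sketch with a hypothetical private struct:

struct example_sched_data {
	struct Qdisc *qdisc;		/* the single child qdisc */
};

static int example_dump_class(struct Qdisc *sch, unsigned long cl,
			      struct sk_buff *skb, struct tcmsg *tcm)
{
	struct example_sched_data *q = qdisc_priv(sch);

	tcm->tcm_handle |= TC_H_MIN(1);		/* the only class: minor id 1 */
	tcm->tcm_info = q->qdisc->handle;	/* where the child qdisc lives */
	return 0;
}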