tl                421 arch/ia64/include/asm/pal.h 			tl		: 1,	/* 1 => MC occurred
tl                498 arch/ia64/include/asm/pal.h 			tl		: 1,	/* Failure in tag part
tl                702 arch/ia64/include/asm/pal.h #define pmci_proc_trap_lost			pme_processor.tl
tl                720 arch/ia64/include/asm/pal.h #define pmci_cache_line_tag_fail		pme_cache.tl
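
The ia64 hits are 1-bit error flags inside the PAL machine-check error-info unions, with #define aliases (pal.h:702, 720) giving each union member a self-describing name. A minimal standalone sketch of that union-plus-alias pattern, assuming an LP64 target and LSB-first bitfield allocation (compiler-specific); the field and type names here are illustrative, not copied from pal.h:

    #include <stdio.h>

    /* Hypothetical error-info word: a 1-bit "tl" flag among other fields. */
    typedef union {
            unsigned long raw;
            struct {
                    unsigned long op : 4,
                                  tl : 1,   /* 1 => failure in tag part */
                                  rsvd : 59;
            } bits;
    } err_info_t;

    /* Readable alias, mirroring pmci_cache_line_tag_fail -> pme_cache.tl */
    #define err_tag_fail(e) ((e).bits.tl)

    int main(void)
    {
            err_info_t e = { .raw = 0x10 };  /* bit 4 set => tl == 1 */

            printf("tag failure: %lu\n", (unsigned long)err_tag_fail(e));
            return 0;
    }
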
tl                 31 arch/s390/kvm/gaccess.c 		unsigned long tl : 2; /* Region- or Segment-Table Length */
tl                 53 arch/s390/kvm/gaccess.c 		unsigned long tl : 2; /* Region-Second-Table Length */
tl                 68 arch/s390/kvm/gaccess.c 		unsigned long tl : 2; /* Region-Third-Table Length */
tl                 82 arch/s390/kvm/gaccess.c 	unsigned long tl : 2; /* Segment-Table Length */
tl                636 arch/s390/kvm/gaccess.c 		if (vaddr.rfx01 > asce.tl)
tl                643 arch/s390/kvm/gaccess.c 		if (vaddr.rsx01 > asce.tl)
tl                650 arch/s390/kvm/gaccess.c 		if (vaddr.rtx01 > asce.tl)
tl                657 arch/s390/kvm/gaccess.c 		if (vaddr.sx01 > asce.tl)
tl                674 arch/s390/kvm/gaccess.c 		if (vaddr.rsx01 < rfte.tf || vaddr.rsx01 > rfte.tl)
tl                692 arch/s390/kvm/gaccess.c 		if (vaddr.rtx01 < rste.tf || vaddr.rtx01 > rste.tl)
tl                720 arch/s390/kvm/gaccess.c 		if (vaddr.sx01 > rtte.fc0.tl)
tl               1005 arch/s390/kvm/gaccess.c 		if (vaddr.rfx01 > asce.tl && !*fake)
tl               1011 arch/s390/kvm/gaccess.c 		if (vaddr.rsx01 > asce.tl)
tl               1017 arch/s390/kvm/gaccess.c 		if (vaddr.rtx01 > asce.tl)
tl               1023 arch/s390/kvm/gaccess.c 		if (vaddr.sx01 > asce.tl)
tl               1044 arch/s390/kvm/gaccess.c 		if (vaddr.rsx01 < rfte.tf || vaddr.rsx01 > rfte.tl)
tl               1069 arch/s390/kvm/gaccess.c 		if (vaddr.rtx01 < rste.tf || vaddr.rtx01 > rste.tl)
tl               1104 arch/s390/kvm/gaccess.c 		if (vaddr.sx01 < rtte.fc0.tf || vaddr.sx01 > rtte.fc0.tl)
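
In the s390 KVM guest-access code, tl is a 2-bit table-length field: per the z/Architecture DAT rules, a region or segment table holds (tl + 1) quarters of 512 entries, so the two most-significant bits of the next-level index must fall in the range [tf, tl] (table offset through table length) or translation faults. That is exactly the checks at gaccess.c:674, 692 and 1044-1104. A standalone restatement of the range test; the values in main() are made up:

    #include <stdio.h>

    /*
     * index01: the two most-significant bits of a table index
     * (e.g. vaddr.rsx01); tf/tl: table offset/length, each 2 bits wide.
     */
    static int index_in_table(unsigned int index01, unsigned int tf,
                              unsigned int tl)
    {
            return index01 >= tf && index01 <= tl;
    }

    int main(void)
    {
            /* A region table with tf = 0, tl = 1 holds quarters 0 and 1. */
            for (unsigned int ix = 0; ix < 4; ix++)
                    printf("rsx01=%u -> %s\n", ix,
                           index_in_table(ix, 0, 1) ? "ok" : "fault");
            return 0;
    }
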
tl               1374 arch/sparc/include/asm/hypervisor.h 	unsigned char	tl;		/* Trap level			*/
tl                172 arch/sparc/kernel/entry.h void sun4v_itlb_error_report(struct pt_regs *regs, int tl);
tl                179 arch/sparc/kernel/entry.h void sun4v_dtlb_error_report(struct pt_regs *regs, int tl);
tl                 66 arch/sparc/kernel/traps_64.c 	unsigned long tl;
tl                 74 arch/sparc/kernel/traps_64.c 	       "dumping track stack.\n", p->tl);
tl               2265 arch/sparc/kernel/traps_64.c void sun4v_itlb_error_report(struct pt_regs *regs, int tl)
tl               2270 arch/sparc/kernel/traps_64.c 	       regs->tpc, tl);
tl               2288 arch/sparc/kernel/traps_64.c void sun4v_dtlb_error_report(struct pt_regs *regs, int tl)
tl               2293 arch/sparc/kernel/traps_64.c 	       regs->tpc, tl);
tl                281 arch/um/drivers/net_kern.c 	struct uml_net_private *lp = from_timer(lp, t, tl);
tl                459 arch/um/drivers/net_kern.c 	timer_setup(&lp->tl, uml_net_user_timer_expire, 0);
tl               1075 arch/um/drivers/vector_kern.c 		mod_timer(&vp->tl, vp->coalesce);
tl               1126 arch/um/drivers/vector_kern.c 	del_timer(&vp->tl);
tl               1287 arch/um/drivers/vector_kern.c 		add_timer(&vp->tl);
tl               1448 arch/um/drivers/vector_kern.c 	struct vector_private *vp = from_timer(vp, t, tl);
tl               1538 arch/um/drivers/vector_kern.c 	timer_setup(&vp->tl, vector_timer_expire, 0);
tl                 77 arch/um/drivers/vector_kern.h 	struct timer_list tl;
tl                 27 arch/um/include/shared/net_kern.h 	struct timer_list tl;
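
The UML network drivers above (and the xtensa ISS hit at network.c:580 plus the mISDN hits further down) all follow the modern kernel timer idiom: embed a struct timer_list, bind the callback with timer_setup(), and recover the containing object inside the callback with from_timer(), which is container_of() keyed on the timer member. A kernel-style sketch of the pattern, with illustrative struct and function names:

    /* Kernel-style sketch; not standalone userspace code. */
    #include <linux/timer.h>

    struct my_private {
            int opens;
            struct timer_list tl;   /* embedded timer, as in uml_net_private */
    };

    static void my_timer_expire(struct timer_list *t)
    {
            /* from_timer() resolves the enclosing my_private from .tl */
            struct my_private *lp = from_timer(lp, t, tl);

            pr_info("timer fired, opens=%d\n", lp->opens);
    }

    static void my_init(struct my_private *lp)
    {
            timer_setup(&lp->tl, my_timer_expire, 0);
            mod_timer(&lp->tl, jiffies + HZ);   /* fire in ~1 second */
    }
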
tl                801 arch/x86/crypto/camellia_glue.c 	u32 dw, tl, tr;
tl                912 arch/x86/crypto/camellia_glue.c 	tl = (subRL[10] >> 32) ^ (subRL[10] & ~subRL[8]);
tl                913 arch/x86/crypto/camellia_glue.c 	dw = tl & (subRL[8] >> 32);				/* FL(kl1) */
tl                915 arch/x86/crypto/camellia_glue.c 	tt = (tr | ((u64)tl << 32));
tl                921 arch/x86/crypto/camellia_glue.c 	tl = (subRL[7] >> 32) ^ (subRL[7] & ~subRL[9]);
tl                922 arch/x86/crypto/camellia_glue.c 	dw = tl & (subRL[9] >> 32);				/* FLinv(kl2) */
tl                924 arch/x86/crypto/camellia_glue.c 	tt = (tr | ((u64)tl << 32));
tl                932 arch/x86/crypto/camellia_glue.c 	tl = (subRL[18] >> 32) ^ (subRL[18] & ~subRL[16]);
tl                933 arch/x86/crypto/camellia_glue.c 	dw = tl & (subRL[16] >> 32);				/* FL(kl3) */
tl                935 arch/x86/crypto/camellia_glue.c 	tt = (tr | ((u64)tl << 32));
tl                941 arch/x86/crypto/camellia_glue.c 	tl = (subRL[15] >> 32) ^ (subRL[15] & ~subRL[17]);
tl                942 arch/x86/crypto/camellia_glue.c 	dw = tl & (subRL[17] >> 32);				/* FLinv(kl4) */
tl                944 arch/x86/crypto/camellia_glue.c 	tt = (tr | ((u64)tl << 32));
tl                956 arch/x86/crypto/camellia_glue.c 		tl = (subRL[26] >> 32) ^ (subRL[26] & ~subRL[24]);
tl                957 arch/x86/crypto/camellia_glue.c 		dw = tl & (subRL[24] >> 32);			/* FL(kl5) */
tl                959 arch/x86/crypto/camellia_glue.c 		tt = (tr | ((u64)tl << 32));
tl                965 arch/x86/crypto/camellia_glue.c 		tl = (subRL[23] >> 32) ^ (subRL[23] & ~subRL[25]);
tl                966 arch/x86/crypto/camellia_glue.c 		dw = tl & (subRL[25] >> 32);			/* FLinv(kl6) */
tl                968 arch/x86/crypto/camellia_glue.c 		tt = (tr | ((u64)tl << 32));
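
These camellia_glue.c lines are key-schedule setup that folds the FL/FLinv whitening layers into the neighbouring round subkeys; tl/tr hold the top and bottom 32-bit halves of a packed 64-bit subRL word, and dw carries the rotated AND term. For reference, the FL function itself, which the FL(kl1)/FLinv(kl2) comments refer to, per RFC 3713; this is a self-contained restatement of the primitive, not the absorption algebra from the glue code:

    #include <stdio.h>
    #include <stdint.h>

    static uint32_t rol32(uint32_t v, int n)
    {
            return (v << n) | (v >> (32 - n));
    }

    /* Camellia FL function (RFC 3713): ke is the 64-bit kl subkey. */
    static uint64_t camellia_fl(uint64_t in, uint64_t ke)
    {
            uint32_t x1 = in >> 32, x2 = (uint32_t)in;
            uint32_t k1 = ke >> 32, k2 = (uint32_t)ke;

            x2 ^= rol32(x1 & k1, 1);
            x1 ^= (x2 | k2);
            return ((uint64_t)x1 << 32) | x2;
    }

    int main(void)
    {
            printf("%016llx\n",
                   (unsigned long long)camellia_fl(0x0123456789abcdefULL,
                                                   0xfedcba9876543210ULL));
            return 0;
    }
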
tl                 71 arch/xtensa/platforms/iss/network.c 	struct timer_list tl;
tl                580 arch/xtensa/platforms/iss/network.c 	timer_setup(&lp->tl, iss_net_user_timer_expire, 0);
tl                367 crypto/camellia_generic.c 	u32 dw, tl, tr;
tl                466 crypto/camellia_generic.c 	tl = subL[10] ^ (subR[10] & ~subR[8]);
tl                467 crypto/camellia_generic.c 	dw = tl & subL[8];  /* FL(kl1) */
tl                469 crypto/camellia_generic.c 	SUBKEY_L(7) = subL[6] ^ tl; /* round 6 */
tl                475 crypto/camellia_generic.c 	tl = subL[7] ^ (subR[7] & ~subR[9]);
tl                476 crypto/camellia_generic.c 	dw = tl & subL[9];  /* FLinv(kl2) */
tl                478 crypto/camellia_generic.c 	SUBKEY_L(10) = tl ^ subL[11]; /* round 7 */
tl                488 crypto/camellia_generic.c 	tl = subL[18] ^ (subR[18] & ~subR[16]);
tl                489 crypto/camellia_generic.c 	dw = tl & subL[16]; /* FL(kl3) */
tl                491 crypto/camellia_generic.c 	SUBKEY_L(15) = subL[14] ^ tl; /* round 12 */
tl                497 crypto/camellia_generic.c 	tl = subL[15] ^ (subR[15] & ~subR[17]);
tl                498 crypto/camellia_generic.c 	dw = tl & subL[17]; /* FLinv(kl4) */
tl                500 crypto/camellia_generic.c 	SUBKEY_L(18) = tl ^ subL[19]; /* round 13 */
tl                516 crypto/camellia_generic.c 		tl = subL[26] ^ (subR[26] & ~subR[24]);
tl                517 crypto/camellia_generic.c 		dw = tl & subL[24]; /* FL(kl5) */
tl                519 crypto/camellia_generic.c 		SUBKEY_L(23) = subL[22] ^ tl; /* round 18 */
tl                525 crypto/camellia_generic.c 		tl = subL[23] ^ (subR[23] & ~subR[25]);
tl                526 crypto/camellia_generic.c 		dw = tl & subL[25]; /* FLinv(kl6) */
tl                528 crypto/camellia_generic.c 		SUBKEY_L(26) = tl ^ subL[27]; /* round 19 */
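
crypto/camellia_generic.c performs the same absorption with the subkeys held as separate 32-bit halves (subL[i]/subR[i]) rather than packed 64-bit subRL[i] words: compare camellia_generic.c:466 with camellia_glue.c:912, where the identical terms are extracted with >> 32 shifts and the u32 assignment discards the high half. A tiny standalone check of that equivalence:

    #include <stdint.h>
    #include <assert.h>

    int main(void)
    {
            uint64_t subRL10 = 0x1122334455667788ULL;
            uint64_t subRL8  = 0x99aabbccddeeff00ULL;
            uint32_t subL10 = subRL10 >> 32, subR10 = (uint32_t)subRL10;
            uint32_t subR8  = (uint32_t)subRL8;

            /* glue form: the u32 assignment keeps only the low 32 bits */
            uint32_t tl_glue =
                    (uint32_t)((subRL10 >> 32) ^ (subRL10 & ~subRL8));

            /* generic form, computed on the split halves */
            uint32_t tl_gen = subL10 ^ (subR10 & ~subR8);

            assert(tl_glue == tl_gen);
            return 0;
    }
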
tl                150 crypto/vmac.c  		int i; u64 th, tl;					\
tl                153 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i],	\
tl                155 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                161 crypto/vmac.c  		int i; u64 th, tl;					\
tl                164 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i],	\
tl                166 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                167 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2],	\
tl                169 crypto/vmac.c  			ADD128(rh1, rl1, th, tl);			\
tl                176 crypto/vmac.c  		int i; u64 th, tl;					\
tl                179 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i],	\
tl                181 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                182 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+2],	\
tl                184 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                185 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+4],	\
tl                187 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                188 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+6],	\
tl                190 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                196 crypto/vmac.c  		int i; u64 th, tl;					\
tl                199 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i],	\
tl                201 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                202 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i)+(kp)[i+2],	\
tl                204 crypto/vmac.c  			ADD128(rh1, rl1, th, tl);			\
tl                205 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+2],	\
tl                207 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                208 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+2)+(kp)[i+4],	\
tl                210 crypto/vmac.c  			ADD128(rh1, rl1, th, tl);			\
tl                211 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+4],	\
tl                213 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                214 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+4)+(kp)[i+6],	\
tl                216 crypto/vmac.c  			ADD128(rh1, rl1, th, tl);			\
tl                217 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+6],	\
tl                219 crypto/vmac.c  			ADD128(rh, rl, th, tl);				\
tl                220 crypto/vmac.c  			MUL64(th, tl, pe64_to_cpup((mp)+i+6)+(kp)[i+8],	\
tl                222 crypto/vmac.c  			ADD128(rh1, rl1, th, tl);			\
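
The vmac.c hits are the NH hash inner loop: each step multiplies two (message + key) 64-bit sums, MUL64(th, tl, a, b) producing the full 128-bit product as a high/low pair, and ADD128(rh, rl, th, tl) accumulating it with carry into a 128-bit running sum. On compilers with __int128 the two macros reduce to the sketch below; the kernel defines them differently per architecture, so these definitions are an assumption for illustration only:

    #include <stdio.h>
    #include <stdint.h>

    #define MUL64(rh, rl, a, b) do {                                \
            unsigned __int128 p = (unsigned __int128)(a) * (b);     \
            (rh) = (uint64_t)(p >> 64);                             \
            (rl) = (uint64_t)p;                                     \
    } while (0)

    #define ADD128(rh, rl, ih, il) do {                             \
            uint64_t _il = (il);                                    \
            (rl) += _il;                                            \
            (rh) += (ih) + ((rl) < _il);  /* carry out of low word */ \
    } while (0)

    int main(void)
    {
            uint64_t rh = 0, rl = 0, th, tl;

            MUL64(th, tl, 0xfedcba9876543210ULL, 0x0123456789abcdefULL);
            ADD128(rh, rl, th, tl);
            printf("%016llx%016llx\n",
                   (unsigned long long)rh, (unsigned long long)rl);
            return 0;
    }
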
tl               3358 drivers/gpu/drm/amd/amdgpu/si_dpm.c static int r600_calculate_at(u32 t, u32 h, u32 fh, u32 fl, u32 *tl, u32 *th)
tl               3374 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	*tl = t + al;
tl               2143 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_timeline *tl = ce->timeline;
tl               2160 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	list_for_each_entry(rq, &tl->requests, link) {
tl               2168 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (&rq->link == &tl->requests)
tl               2205 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_timeline *tl;
tl               2234 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	tl = intel_context_timeline_lock(ce);
tl               2235 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (IS_ERR(tl)) {
tl               2236 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		err = PTR_ERR(tl);
tl               2243 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_timeline_unlock(tl);
tl               2262 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	mutex_lock(&tl->mutex);
tl               2264 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	intel_context_timeline_unlock(tl);
tl               2273 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct intel_timeline *tl = ce->timeline;
tl               2275 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	mutex_lock(&tl->mutex);
tl               2277 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	mutex_unlock(&tl->mutex);
tl                295 drivers/gpu/drm/i915/gt/intel_context.c 	struct intel_timeline *tl = ce->timeline;
tl                301 drivers/gpu/drm/i915/gt/intel_context.c 	if (rq->timeline != tl) { /* beware timeline sharing */
tl                302 drivers/gpu/drm/i915/gt/intel_context.c 		err = mutex_lock_interruptible_nested(&tl->mutex,
tl                308 drivers/gpu/drm/i915/gt/intel_context.c 		err = i915_active_request_set(&tl->last_request, rq);
tl                309 drivers/gpu/drm/i915/gt/intel_context.c 		mutex_unlock(&tl->mutex);
tl                129 drivers/gpu/drm/i915/gt/intel_context.h 	struct intel_timeline *tl = ce->timeline;
tl                132 drivers/gpu/drm/i915/gt/intel_context.h 	err = mutex_lock_interruptible(&tl->mutex);
tl                136 drivers/gpu/drm/i915/gt/intel_context.h 	return tl;
tl                139 drivers/gpu/drm/i915/gt/intel_context.h static inline void intel_context_timeline_unlock(struct intel_timeline *tl)
tl                140 drivers/gpu/drm/i915/gt/intel_context.h 	__releases(&tl->mutex)
tl                142 drivers/gpu/drm/i915/gt/intel_context.h 	mutex_unlock(&tl->mutex);
tl               3378 drivers/gpu/drm/i915/gt/intel_lrc.c 		struct intel_timeline *tl;
tl               3380 drivers/gpu/drm/i915/gt/intel_lrc.c 		tl = intel_timeline_create(engine->gt, NULL);
tl               3381 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (IS_ERR(tl)) {
tl               3382 drivers/gpu/drm/i915/gt/intel_lrc.c 			ret = PTR_ERR(tl);
tl               3386 drivers/gpu/drm/i915/gt/intel_lrc.c 		ce->timeline = tl;
tl                792 drivers/gpu/drm/i915/gt/intel_reset.c 	struct intel_timeline *tl;
tl                814 drivers/gpu/drm/i915/gt/intel_reset.c 	list_for_each_entry(tl, &timelines->active_list, link) {
tl                817 drivers/gpu/drm/i915/gt/intel_reset.c 		rq = i915_active_request_get_unlocked(&tl->last_request);
tl                835 drivers/gpu/drm/i915/gt/intel_reset.c 		tl = list_entry(&timelines->active_list, typeof(*tl), link);
tl               1870 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	       struct intel_timeline *tl,
tl               1879 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	GEM_BUG_ON(list_empty(&tl->requests));
tl               1880 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	list_for_each_entry(target, &tl->requests, link) {
tl               1890 drivers/gpu/drm/i915/gt/intel_ringbuffer.c 	if (GEM_WARN_ON(&target->link == &tl->requests))
tl                313 drivers/gpu/drm/i915/gt/intel_timeline.c int intel_timeline_pin(struct intel_timeline *tl)
tl                317 drivers/gpu/drm/i915/gt/intel_timeline.c 	if (atomic_add_unless(&tl->pin_count, 1, 0))
tl                320 drivers/gpu/drm/i915/gt/intel_timeline.c 	err = i915_vma_pin(tl->hwsp_ggtt, 0, 0, PIN_GLOBAL | PIN_HIGH);
tl                324 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->hwsp_offset =
tl                325 drivers/gpu/drm/i915/gt/intel_timeline.c 		i915_ggtt_offset(tl->hwsp_ggtt) +
tl                326 drivers/gpu/drm/i915/gt/intel_timeline.c 		offset_in_page(tl->hwsp_offset);
tl                328 drivers/gpu/drm/i915/gt/intel_timeline.c 	cacheline_acquire(tl->hwsp_cacheline);
tl                329 drivers/gpu/drm/i915/gt/intel_timeline.c 	if (atomic_fetch_inc(&tl->pin_count)) {
tl                330 drivers/gpu/drm/i915/gt/intel_timeline.c 		cacheline_release(tl->hwsp_cacheline);
tl                331 drivers/gpu/drm/i915/gt/intel_timeline.c 		__i915_vma_unpin(tl->hwsp_ggtt);
tl                337 drivers/gpu/drm/i915/gt/intel_timeline.c void intel_timeline_enter(struct intel_timeline *tl)
tl                339 drivers/gpu/drm/i915/gt/intel_timeline.c 	struct intel_gt_timelines *timelines = &tl->gt->timelines;
tl                342 drivers/gpu/drm/i915/gt/intel_timeline.c 	lockdep_assert_held(&tl->mutex);
tl                344 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(!atomic_read(&tl->pin_count));
tl                345 drivers/gpu/drm/i915/gt/intel_timeline.c 	if (tl->active_count++)
tl                347 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(!tl->active_count); /* overflow? */
tl                350 drivers/gpu/drm/i915/gt/intel_timeline.c 	list_add(&tl->link, &timelines->active_list);
tl                354 drivers/gpu/drm/i915/gt/intel_timeline.c void intel_timeline_exit(struct intel_timeline *tl)
tl                356 drivers/gpu/drm/i915/gt/intel_timeline.c 	struct intel_gt_timelines *timelines = &tl->gt->timelines;
tl                359 drivers/gpu/drm/i915/gt/intel_timeline.c 	lockdep_assert_held(&tl->mutex);
tl                361 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(!tl->active_count);
tl                362 drivers/gpu/drm/i915/gt/intel_timeline.c 	if (--tl->active_count)
tl                366 drivers/gpu/drm/i915/gt/intel_timeline.c 	list_del(&tl->link);
tl                374 drivers/gpu/drm/i915/gt/intel_timeline.c 	i915_syncmap_free(&tl->sync);
tl                377 drivers/gpu/drm/i915/gt/intel_timeline.c static u32 timeline_advance(struct intel_timeline *tl)
tl                379 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(!atomic_read(&tl->pin_count));
tl                380 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(tl->seqno & tl->has_initial_breadcrumb);
tl                382 drivers/gpu/drm/i915/gt/intel_timeline.c 	return tl->seqno += 1 + tl->has_initial_breadcrumb;
tl                385 drivers/gpu/drm/i915/gt/intel_timeline.c static void timeline_rollback(struct intel_timeline *tl)
tl                387 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->seqno -= 1 + tl->has_initial_breadcrumb;
tl                391 drivers/gpu/drm/i915/gt/intel_timeline.c __intel_timeline_get_seqno(struct intel_timeline *tl,
tl                420 drivers/gpu/drm/i915/gt/intel_timeline.c 	vma = hwsp_alloc(tl, &cacheline);
tl                445 drivers/gpu/drm/i915/gt/intel_timeline.c 	err = i915_active_ref(&tl->hwsp_cacheline->active, tl, rq);
tl                449 drivers/gpu/drm/i915/gt/intel_timeline.c 	cacheline_release(tl->hwsp_cacheline); /* ownership now xfered to rq */
tl                450 drivers/gpu/drm/i915/gt/intel_timeline.c 	cacheline_free(tl->hwsp_cacheline);
tl                452 drivers/gpu/drm/i915/gt/intel_timeline.c 	i915_vma_unpin(tl->hwsp_ggtt); /* binding kept alive by old cacheline */
tl                453 drivers/gpu/drm/i915/gt/intel_timeline.c 	i915_vma_put(tl->hwsp_ggtt);
tl                455 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->hwsp_ggtt = i915_vma_get(vma);
tl                458 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->hwsp_offset = cacheline * CACHELINE_BYTES;
tl                459 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->hwsp_seqno =
tl                460 drivers/gpu/drm/i915/gt/intel_timeline.c 		memset(vaddr + tl->hwsp_offset, 0, CACHELINE_BYTES);
tl                462 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->hwsp_offset += i915_ggtt_offset(vma);
tl                465 drivers/gpu/drm/i915/gt/intel_timeline.c 	tl->hwsp_cacheline = cl;
tl                467 drivers/gpu/drm/i915/gt/intel_timeline.c 	*seqno = timeline_advance(tl);
tl                468 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(i915_seqno_passed(*tl->hwsp_seqno, *seqno));
tl                476 drivers/gpu/drm/i915/gt/intel_timeline.c 	timeline_rollback(tl);
tl                480 drivers/gpu/drm/i915/gt/intel_timeline.c int intel_timeline_get_seqno(struct intel_timeline *tl,
tl                484 drivers/gpu/drm/i915/gt/intel_timeline.c 	*seqno = timeline_advance(tl);
tl                487 drivers/gpu/drm/i915/gt/intel_timeline.c 	if (unlikely(!*seqno && tl->hwsp_cacheline))
tl                488 drivers/gpu/drm/i915/gt/intel_timeline.c 		return __intel_timeline_get_seqno(tl, rq, seqno);
tl                504 drivers/gpu/drm/i915/gt/intel_timeline.c 	struct intel_timeline *tl = from->timeline;
tl                507 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(to->timeline == tl);
tl                509 drivers/gpu/drm/i915/gt/intel_timeline.c 	mutex_lock_nested(&tl->mutex, SINGLE_DEPTH_NESTING);
tl                514 drivers/gpu/drm/i915/gt/intel_timeline.c 		if (likely(cl == tl->hwsp_cacheline)) {
tl                515 drivers/gpu/drm/i915/gt/intel_timeline.c 			*hwsp = tl->hwsp_offset;
tl                522 drivers/gpu/drm/i915/gt/intel_timeline.c 	mutex_unlock(&tl->mutex);
tl                527 drivers/gpu/drm/i915/gt/intel_timeline.c void intel_timeline_unpin(struct intel_timeline *tl)
tl                529 drivers/gpu/drm/i915/gt/intel_timeline.c 	GEM_BUG_ON(!atomic_read(&tl->pin_count));
tl                530 drivers/gpu/drm/i915/gt/intel_timeline.c 	if (!atomic_dec_and_test(&tl->pin_count))
tl                533 drivers/gpu/drm/i915/gt/intel_timeline.c 	cacheline_release(tl->hwsp_cacheline);
tl                535 drivers/gpu/drm/i915/gt/intel_timeline.c 	__i915_vma_unpin(tl->hwsp_ggtt);
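
intel_timeline_pin()/intel_timeline_unpin() (intel_timeline.c:313-331, 527-535) show i915's lock-free pin-count idiom: atomic_add_unless(&pin_count, 1, 0) takes the fast path only when the object is already pinned; otherwise the caller performs the expensive setup, and atomic_fetch_inc() detects whether a racing thread completed its own setup first. A stripped-down sketch of just the control flow, with the pinned resource and error paths elided:

    /* Kernel-style sketch of the pin/unpin pattern. */
    #include <linux/atomic.h>

    struct pinned {
            atomic_t pin_count;
    };

    static int pin(struct pinned *p)
    {
            if (atomic_add_unless(&p->pin_count, 1, 0))
                    return 0;          /* already pinned: count was > 0 */

            /* slow path: acquire backing resources here ... */

            if (atomic_fetch_inc(&p->pin_count)) {
                    /* raced: someone else pinned first, drop our copy */
            }
            return 0;
    }

    static void unpin(struct pinned *p)
    {
            if (!atomic_dec_and_test(&p->pin_count))
                    return;            /* still pinned by others */

            /* last unpin: release backing resources here ... */
    }
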
tl                 34 drivers/gpu/drm/i915/gt/intel_timeline.h int intel_timeline_init(struct intel_timeline *tl,
tl                 37 drivers/gpu/drm/i915/gt/intel_timeline.h void intel_timeline_fini(struct intel_timeline *tl);
tl                 55 drivers/gpu/drm/i915/gt/intel_timeline.h static inline int __intel_timeline_sync_set(struct intel_timeline *tl,
tl                 58 drivers/gpu/drm/i915/gt/intel_timeline.h 	return i915_syncmap_set(&tl->sync, context, seqno);
tl                 61 drivers/gpu/drm/i915/gt/intel_timeline.h static inline int intel_timeline_sync_set(struct intel_timeline *tl,
tl                 64 drivers/gpu/drm/i915/gt/intel_timeline.h 	return __intel_timeline_sync_set(tl, fence->context, fence->seqno);
tl                 67 drivers/gpu/drm/i915/gt/intel_timeline.h static inline bool __intel_timeline_sync_is_later(struct intel_timeline *tl,
tl                 70 drivers/gpu/drm/i915/gt/intel_timeline.h 	return i915_syncmap_is_later(&tl->sync, context, seqno);
tl                 73 drivers/gpu/drm/i915/gt/intel_timeline.h static inline bool intel_timeline_sync_is_later(struct intel_timeline *tl,
tl                 76 drivers/gpu/drm/i915/gt/intel_timeline.h 	return __intel_timeline_sync_is_later(tl, fence->context, fence->seqno);
tl                 79 drivers/gpu/drm/i915/gt/intel_timeline.h int intel_timeline_pin(struct intel_timeline *tl);
tl                 80 drivers/gpu/drm/i915/gt/intel_timeline.h void intel_timeline_enter(struct intel_timeline *tl);
tl                 81 drivers/gpu/drm/i915/gt/intel_timeline.h int intel_timeline_get_seqno(struct intel_timeline *tl,
tl                 84 drivers/gpu/drm/i915/gt/intel_timeline.h void intel_timeline_exit(struct intel_timeline *tl);
tl                 85 drivers/gpu/drm/i915/gt/intel_timeline.h void intel_timeline_unpin(struct intel_timeline *tl);
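
The intel_timeline.h wrappers above record and query a per-timeline syncmap: sync_set() remembers that fence->seqno on fence->context is already ordered behind this timeline, and sync_is_later() asks whether a later-or-equal seqno was recorded, so a redundant wait can be skipped. The "is later" comparison has to survive u32 wraparound, which i915 does with signed subtraction; this helper mirrors i915_seqno_passed():

    #include <stdbool.h>
    #include <stdint.h>
    #include <assert.h>

    /* true if seqno a is at or after b, tolerant of u32 wraparound */
    static bool seqno_passed(uint32_t a, uint32_t b)
    {
            return (int32_t)(a - b) >= 0;
    }

    int main(void)
    {
            assert(seqno_passed(5, 3));
            assert(!seqno_passed(3, 5));
            /* 2 is "after" 0xfffffffe once the counter wraps */
            assert(seqno_passed(2, 0xfffffffeu));
            return 0;
    }
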
tl                 35 drivers/gpu/drm/i915/gt/mock_engine.c static void mock_timeline_pin(struct intel_timeline *tl)
tl                 37 drivers/gpu/drm/i915/gt/mock_engine.c 	atomic_inc(&tl->pin_count);
tl                 40 drivers/gpu/drm/i915/gt/mock_engine.c static void mock_timeline_unpin(struct intel_timeline *tl)
tl                 42 drivers/gpu/drm/i915/gt/mock_engine.c 	GEM_BUG_ON(!atomic_read(&tl->pin_count));
tl                 43 drivers/gpu/drm/i915/gt/mock_engine.c 	atomic_dec(&tl->pin_count);
tl                 39 drivers/gpu/drm/i915/gt/selftest_context.c 	struct intel_timeline *tl = ce->timeline;
tl                 42 drivers/gpu/drm/i915/gt/selftest_context.c 	mutex_lock(&tl->mutex);
tl                 48 drivers/gpu/drm/i915/gt/selftest_context.c 		rq = rcu_dereference(tl->last_request.request);
tl                 63 drivers/gpu/drm/i915/gt/selftest_context.c 	mutex_unlock(&tl->mutex);
tl                 19 drivers/gpu/drm/i915/gt/selftest_timeline.c static struct page *hwsp_page(struct intel_timeline *tl)
tl                 21 drivers/gpu/drm/i915/gt/selftest_timeline.c 	struct drm_i915_gem_object *obj = tl->hwsp_ggtt->obj;
tl                 27 drivers/gpu/drm/i915/gt/selftest_timeline.c static unsigned long hwsp_cacheline(struct intel_timeline *tl)
tl                 29 drivers/gpu/drm/i915/gt/selftest_timeline.c 	unsigned long address = (unsigned long)page_address(hwsp_page(tl));
tl                 31 drivers/gpu/drm/i915/gt/selftest_timeline.c 	return (address + tl->hwsp_offset) / CACHELINE_BYTES;
tl                 50 drivers/gpu/drm/i915/gt/selftest_timeline.c 			       struct intel_timeline *tl)
tl                 52 drivers/gpu/drm/i915/gt/selftest_timeline.c 	tl = xchg(&state->history[idx], tl);
tl                 53 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (tl) {
tl                 54 drivers/gpu/drm/i915/gt/selftest_timeline.c 		radix_tree_delete(&state->cachelines, hwsp_cacheline(tl));
tl                 55 drivers/gpu/drm/i915/gt/selftest_timeline.c 		intel_timeline_put(tl);
tl                 63 drivers/gpu/drm/i915/gt/selftest_timeline.c 	struct intel_timeline *tl;
tl                 70 drivers/gpu/drm/i915/gt/selftest_timeline.c 		tl = intel_timeline_create(&state->i915->gt, NULL);
tl                 71 drivers/gpu/drm/i915/gt/selftest_timeline.c 		if (IS_ERR(tl))
tl                 72 drivers/gpu/drm/i915/gt/selftest_timeline.c 			return PTR_ERR(tl);
tl                 74 drivers/gpu/drm/i915/gt/selftest_timeline.c 		cacheline = hwsp_cacheline(tl);
tl                 75 drivers/gpu/drm/i915/gt/selftest_timeline.c 		err = radix_tree_insert(&state->cachelines, cacheline, tl);
tl                 81 drivers/gpu/drm/i915/gt/selftest_timeline.c 			intel_timeline_put(tl);
tl                 86 drivers/gpu/drm/i915/gt/selftest_timeline.c 		__mock_hwsp_record(state, idx, tl);
tl                166 drivers/gpu/drm/i915/gt/selftest_timeline.c static int __igt_sync(struct intel_timeline *tl,
tl                173 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (__intel_timeline_sync_is_later(tl, ctx, p->seqno) != p->expected) {
tl                180 drivers/gpu/drm/i915/gt/selftest_timeline.c 		ret = __intel_timeline_sync_set(tl, ctx, p->seqno);
tl                208 drivers/gpu/drm/i915/gt/selftest_timeline.c 	struct intel_timeline tl;
tl                212 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_init(&tl, 0);
tl                218 drivers/gpu/drm/i915/gt/selftest_timeline.c 				ret = __igt_sync(&tl, ctx, p, "1");
tl                224 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_fini(&tl);
tl                226 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_init(&tl, 0);
tl                232 drivers/gpu/drm/i915/gt/selftest_timeline.c 				ret = __igt_sync(&tl, ctx, p, "2");
tl                240 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_fini(&tl);
tl                252 drivers/gpu/drm/i915/gt/selftest_timeline.c 	struct intel_timeline tl;
tl                258 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_init(&tl, 0);
tl                290 drivers/gpu/drm/i915/gt/selftest_timeline.c 		__intel_timeline_sync_set(&tl, id, 0);
tl                305 drivers/gpu/drm/i915/gt/selftest_timeline.c 		if (!__intel_timeline_sync_is_later(&tl, id, 0)) {
tl                306 drivers/gpu/drm/i915/gt/selftest_timeline.c 			mock_timeline_fini(&tl);
tl                316 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_fini(&tl);
tl                319 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_init(&tl, 0);
tl                326 drivers/gpu/drm/i915/gt/selftest_timeline.c 		__intel_timeline_sync_set(&tl, count++, 0);
tl                336 drivers/gpu/drm/i915/gt/selftest_timeline.c 		if (!__intel_timeline_sync_is_later(&tl, end_time, 0)) {
tl                338 drivers/gpu/drm/i915/gt/selftest_timeline.c 			mock_timeline_fini(&tl);
tl                346 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_fini(&tl);
tl                349 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_init(&tl, 0);
tl                360 drivers/gpu/drm/i915/gt/selftest_timeline.c 		if (!__intel_timeline_sync_is_later(&tl, id, seqno))
tl                361 drivers/gpu/drm/i915/gt/selftest_timeline.c 			__intel_timeline_sync_set(&tl, id, seqno);
tl                369 drivers/gpu/drm/i915/gt/selftest_timeline.c 	mock_timeline_fini(&tl);
tl                377 drivers/gpu/drm/i915/gt/selftest_timeline.c 		mock_timeline_init(&tl, 0);
tl                389 drivers/gpu/drm/i915/gt/selftest_timeline.c 			__intel_timeline_sync_is_later(&tl, id, 0);
tl                390 drivers/gpu/drm/i915/gt/selftest_timeline.c 			__intel_timeline_sync_set(&tl, id, 0);
tl                398 drivers/gpu/drm/i915/gt/selftest_timeline.c 		mock_timeline_fini(&tl);
tl                447 drivers/gpu/drm/i915/gt/selftest_timeline.c tl_write(struct intel_timeline *tl, struct intel_engine_cs *engine, u32 value)
tl                452 drivers/gpu/drm/i915/gt/selftest_timeline.c 	lockdep_assert_held(&tl->gt->i915->drm.struct_mutex); /* lazy rq refs */
tl                454 drivers/gpu/drm/i915/gt/selftest_timeline.c 	err = intel_timeline_pin(tl);
tl                464 drivers/gpu/drm/i915/gt/selftest_timeline.c 	err = emit_ggtt_store_dw(rq, tl->hwsp_offset, value);
tl                470 drivers/gpu/drm/i915/gt/selftest_timeline.c 	intel_timeline_unpin(tl);
tl                480 drivers/gpu/drm/i915/gt/selftest_timeline.c 	struct intel_timeline *tl;
tl                482 drivers/gpu/drm/i915/gt/selftest_timeline.c 	tl = intel_timeline_create(&i915->gt, NULL);
tl                483 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (IS_ERR(tl))
tl                484 drivers/gpu/drm/i915/gt/selftest_timeline.c 		return tl;
tl                486 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (*tl->hwsp_seqno != tl->seqno) {
tl                488 drivers/gpu/drm/i915/gt/selftest_timeline.c 		       *tl->hwsp_seqno, tl->seqno);
tl                489 drivers/gpu/drm/i915/gt/selftest_timeline.c 		intel_timeline_put(tl);
tl                493 drivers/gpu/drm/i915/gt/selftest_timeline.c 	return tl;
tl                527 drivers/gpu/drm/i915/gt/selftest_timeline.c 			struct intel_timeline *tl;
tl                530 drivers/gpu/drm/i915/gt/selftest_timeline.c 			tl = checked_intel_timeline_create(i915);
tl                531 drivers/gpu/drm/i915/gt/selftest_timeline.c 			if (IS_ERR(tl)) {
tl                532 drivers/gpu/drm/i915/gt/selftest_timeline.c 				err = PTR_ERR(tl);
tl                536 drivers/gpu/drm/i915/gt/selftest_timeline.c 			rq = tl_write(tl, engine, count);
tl                538 drivers/gpu/drm/i915/gt/selftest_timeline.c 				intel_timeline_put(tl);
tl                543 drivers/gpu/drm/i915/gt/selftest_timeline.c 			timelines[count++] = tl;
tl                552 drivers/gpu/drm/i915/gt/selftest_timeline.c 		struct intel_timeline *tl = timelines[n];
tl                554 drivers/gpu/drm/i915/gt/selftest_timeline.c 		if (!err && *tl->hwsp_seqno != n) {
tl                556 drivers/gpu/drm/i915/gt/selftest_timeline.c 			       n, *tl->hwsp_seqno);
tl                559 drivers/gpu/drm/i915/gt/selftest_timeline.c 		intel_timeline_put(tl);
tl                600 drivers/gpu/drm/i915/gt/selftest_timeline.c 			struct intel_timeline *tl;
tl                606 drivers/gpu/drm/i915/gt/selftest_timeline.c 			tl = checked_intel_timeline_create(i915);
tl                607 drivers/gpu/drm/i915/gt/selftest_timeline.c 			if (IS_ERR(tl)) {
tl                608 drivers/gpu/drm/i915/gt/selftest_timeline.c 				err = PTR_ERR(tl);
tl                612 drivers/gpu/drm/i915/gt/selftest_timeline.c 			rq = tl_write(tl, engine, count);
tl                614 drivers/gpu/drm/i915/gt/selftest_timeline.c 				intel_timeline_put(tl);
tl                619 drivers/gpu/drm/i915/gt/selftest_timeline.c 			timelines[count++] = tl;
tl                628 drivers/gpu/drm/i915/gt/selftest_timeline.c 		struct intel_timeline *tl = timelines[n];
tl                630 drivers/gpu/drm/i915/gt/selftest_timeline.c 		if (!err && *tl->hwsp_seqno != n) {
tl                632 drivers/gpu/drm/i915/gt/selftest_timeline.c 			       n, *tl->hwsp_seqno);
tl                635 drivers/gpu/drm/i915/gt/selftest_timeline.c 		intel_timeline_put(tl);
tl                651 drivers/gpu/drm/i915/gt/selftest_timeline.c 	struct intel_timeline *tl;
tl                664 drivers/gpu/drm/i915/gt/selftest_timeline.c 	tl = intel_timeline_create(&i915->gt, NULL);
tl                665 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (IS_ERR(tl)) {
tl                666 drivers/gpu/drm/i915/gt/selftest_timeline.c 		err = PTR_ERR(tl);
tl                669 drivers/gpu/drm/i915/gt/selftest_timeline.c 	if (!tl->has_initial_breadcrumb || !tl->hwsp_cacheline)
tl                672 drivers/gpu/drm/i915/gt/selftest_timeline.c 	err = intel_timeline_pin(tl);
tl                690 drivers/gpu/drm/i915/gt/selftest_timeline.c 		tl->seqno = -4u;
tl                692 drivers/gpu/drm/i915/gt/selftest_timeline.c 		mutex_lock_nested(&tl->mutex, SINGLE_DEPTH_NESTING);
tl                693 drivers/gpu/drm/i915/gt/selftest_timeline.c 		err = intel_timeline_get_seqno(tl, rq, &seqno[0]);
tl                694 drivers/gpu/drm/i915/gt/selftest_timeline.c 		mutex_unlock(&tl->mutex);
tl                700 drivers/gpu/drm/i915/gt/selftest_timeline.c 			 seqno[0], tl->hwsp_offset);
tl                702 drivers/gpu/drm/i915/gt/selftest_timeline.c 		err = emit_ggtt_store_dw(rq, tl->hwsp_offset, seqno[0]);
tl                707 drivers/gpu/drm/i915/gt/selftest_timeline.c 		hwsp_seqno[0] = tl->hwsp_seqno;
tl                709 drivers/gpu/drm/i915/gt/selftest_timeline.c 		mutex_lock_nested(&tl->mutex, SINGLE_DEPTH_NESTING);
tl                710 drivers/gpu/drm/i915/gt/selftest_timeline.c 		err = intel_timeline_get_seqno(tl, rq, &seqno[1]);
tl                711 drivers/gpu/drm/i915/gt/selftest_timeline.c 		mutex_unlock(&tl->mutex);
tl                717 drivers/gpu/drm/i915/gt/selftest_timeline.c 			 seqno[1], tl->hwsp_offset);
tl                719 drivers/gpu/drm/i915/gt/selftest_timeline.c 		err = emit_ggtt_store_dw(rq, tl->hwsp_offset, seqno[1]);
tl                724 drivers/gpu/drm/i915/gt/selftest_timeline.c 		hwsp_seqno[1] = tl->hwsp_seqno;
tl                753 drivers/gpu/drm/i915/gt/selftest_timeline.c 	intel_timeline_unpin(tl);
tl                755 drivers/gpu/drm/i915/gt/selftest_timeline.c 	intel_timeline_put(tl);
tl                789 drivers/gpu/drm/i915/gt/selftest_timeline.c 			struct intel_timeline *tl;
tl                792 drivers/gpu/drm/i915/gt/selftest_timeline.c 			tl = checked_intel_timeline_create(i915);
tl                793 drivers/gpu/drm/i915/gt/selftest_timeline.c 			if (IS_ERR(tl)) {
tl                794 drivers/gpu/drm/i915/gt/selftest_timeline.c 				err = PTR_ERR(tl);
tl                798 drivers/gpu/drm/i915/gt/selftest_timeline.c 			rq = tl_write(tl, engine, count);
tl                800 drivers/gpu/drm/i915/gt/selftest_timeline.c 				intel_timeline_put(tl);
tl                807 drivers/gpu/drm/i915/gt/selftest_timeline.c 				intel_timeline_put(tl);
tl                812 drivers/gpu/drm/i915/gt/selftest_timeline.c 			if (*tl->hwsp_seqno != count) {
tl                814 drivers/gpu/drm/i915/gt/selftest_timeline.c 				       count, *tl->hwsp_seqno);
tl                818 drivers/gpu/drm/i915/gt/selftest_timeline.c 			intel_timeline_put(tl);
tl                182 drivers/gpu/drm/i915/i915_active.c active_instance(struct i915_active *ref, struct intel_timeline *tl)
tl                186 drivers/gpu/drm/i915/i915_active.c 	u64 idx = tl->fence_context;
tl                225 drivers/gpu/drm/i915/i915_active.c 	i915_active_request_init(&node->base, &tl->mutex, NULL, node_retire);
tl                308 drivers/gpu/drm/i915/i915_active.c 		    struct intel_timeline *tl,
tl                314 drivers/gpu/drm/i915/i915_active.c 	lockdep_assert_held(&tl->mutex);
tl                321 drivers/gpu/drm/i915/i915_active.c 	active = active_instance(ref, tl);
tl                373 drivers/gpu/drm/i915/i915_active.h 		    struct intel_timeline *tl,
tl                895 drivers/gpu/drm/i915/i915_gem.c 	struct intel_timeline *tl;
tl                899 drivers/gpu/drm/i915/i915_gem.c 	list_for_each_entry(tl, &timelines->active_list, link) {
tl                902 drivers/gpu/drm/i915/i915_gem.c 		rq = i915_active_request_get_unlocked(&tl->last_request);
tl                927 drivers/gpu/drm/i915/i915_gem.c 		tl = list_entry(&timelines->active_list, typeof(*tl), link);
tl                319 drivers/gpu/drm/i915/i915_request.c 	struct intel_timeline * const tl = rq->timeline;
tl                327 drivers/gpu/drm/i915/i915_request.c 	lockdep_assert_held(&tl->mutex);
tl                331 drivers/gpu/drm/i915/i915_request.c 		tmp = list_first_entry(&tl->requests, typeof(*tmp), link);
tl                594 drivers/gpu/drm/i915/i915_request.c static void retire_requests(struct intel_timeline *tl)
tl                598 drivers/gpu/drm/i915/i915_request.c 	list_for_each_entry_safe(rq, rn, &tl->requests, link)
tl                604 drivers/gpu/drm/i915/i915_request.c request_alloc_slow(struct intel_timeline *tl, gfp_t gfp)
tl                608 drivers/gpu/drm/i915/i915_request.c 	if (list_empty(&tl->requests))
tl                615 drivers/gpu/drm/i915/i915_request.c 	rq = list_first_entry(&tl->requests, typeof(*rq), link);
tl                624 drivers/gpu/drm/i915/i915_request.c 	rq = list_last_entry(&tl->requests, typeof(*rq), link);
tl                628 drivers/gpu/drm/i915/i915_request.c 	retire_requests(tl);
tl                637 drivers/gpu/drm/i915/i915_request.c 	struct intel_timeline *tl = ce->timeline;
tl                679 drivers/gpu/drm/i915/i915_request.c 		rq = request_alloc_slow(tl, gfp);
tl                686 drivers/gpu/drm/i915/i915_request.c 	ret = intel_timeline_get_seqno(tl, rq, &seqno);
tl                695 drivers/gpu/drm/i915/i915_request.c 	rq->timeline = tl;
tl                696 drivers/gpu/drm/i915/i915_request.c 	rq->hwsp_seqno = tl->hwsp_seqno;
tl                697 drivers/gpu/drm/i915/i915_request.c 	rq->hwsp_cacheline = tl->hwsp_cacheline;
tl                702 drivers/gpu/drm/i915/i915_request.c 		       tl->fence_context, seqno);
tl                771 drivers/gpu/drm/i915/i915_request.c 	struct intel_timeline *tl;
tl                773 drivers/gpu/drm/i915/i915_request.c 	tl = intel_context_timeline_lock(ce);
tl                774 drivers/gpu/drm/i915/i915_request.c 	if (IS_ERR(tl))
tl                775 drivers/gpu/drm/i915/i915_request.c 		return ERR_CAST(tl);
tl                778 drivers/gpu/drm/i915/i915_request.c 	rq = list_first_entry(&tl->requests, typeof(*rq), link);
tl                779 drivers/gpu/drm/i915/i915_request.c 	if (!list_is_last(&rq->link, &tl->requests))
tl                789 drivers/gpu/drm/i915/i915_request.c 	rq->cookie = lockdep_pin_lock(&tl->mutex);
tl                794 drivers/gpu/drm/i915/i915_request.c 	intel_context_timeline_unlock(tl);
tl               1241 drivers/gpu/drm/i915/i915_request.c 	struct intel_timeline * const tl = rq->timeline;
tl               1244 drivers/gpu/drm/i915/i915_request.c 	lockdep_assert_held(&tl->mutex);
tl               1245 drivers/gpu/drm/i915/i915_request.c 	lockdep_unpin_lock(&tl->mutex, rq->cookie);
tl               1296 drivers/gpu/drm/i915/i915_request.c 	if (prev && i915_request_completed(prev) && prev->timeline == tl)
tl               1299 drivers/gpu/drm/i915/i915_request.c 	mutex_unlock(&tl->mutex);
tl               1520 drivers/gpu/drm/i915/i915_request.c 	struct intel_timeline *tl, *tn;
tl               1525 drivers/gpu/drm/i915/i915_request.c 	list_for_each_entry_safe(tl, tn, &timelines->active_list, link) {
tl               1526 drivers/gpu/drm/i915/i915_request.c 		if (!mutex_trylock(&tl->mutex))
tl               1529 drivers/gpu/drm/i915/i915_request.c 		intel_timeline_get(tl);
tl               1530 drivers/gpu/drm/i915/i915_request.c 		GEM_BUG_ON(!tl->active_count);
tl               1531 drivers/gpu/drm/i915/i915_request.c 		tl->active_count++; /* pin the list element */
tl               1534 drivers/gpu/drm/i915/i915_request.c 		retire_requests(tl);
tl               1539 drivers/gpu/drm/i915/i915_request.c 		list_safe_reset_next(tl, tn, link);
tl               1540 drivers/gpu/drm/i915/i915_request.c 		if (!--tl->active_count)
tl               1541 drivers/gpu/drm/i915/i915_request.c 			list_del(&tl->link);
tl               1543 drivers/gpu/drm/i915/i915_request.c 		mutex_unlock(&tl->mutex);
tl               1546 drivers/gpu/drm/i915/i915_request.c 		if (refcount_dec_and_test(&tl->kref.refcount)) {
tl               1547 drivers/gpu/drm/i915/i915_request.c 			GEM_BUG_ON(tl->active_count);
tl               1548 drivers/gpu/drm/i915/i915_request.c 			list_add(&tl->link, &free);
tl               1553 drivers/gpu/drm/i915/i915_request.c 	list_for_each_entry_safe(tl, tn, &free, link)
tl               1554 drivers/gpu/drm/i915/i915_request.c 		__intel_timeline_free(&tl->kref);
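
The i915_request.c hits around lines 1520-1554 retire completed requests across all active timelines: each timeline's active_count is bumped to pin its list element while the timeline mutex is held, retire_requests() walks tl->requests with list_for_each_entry_safe() because retiring unlinks entries as it goes, and timelines whose last reference drops are batched onto a local free list and freed outside the lock. The safe-iteration idiom, as a kernel-style sketch:

    /* Kernel-style sketch: why the _safe iterator is needed here. */
    #include <linux/list.h>

    struct request {
            struct list_head link;
            bool completed;
    };

    static void retire(struct request *rq)
    {
            list_del(&rq->link);  /* unlinks rq: plain iteration would break */
            /* free or recycle rq ... */
    }

    static void retire_requests(struct list_head *requests)
    {
            struct request *rq, *rn;

            /* rn caches the next node before rq may be freed */
            list_for_each_entry_safe(rq, rn, requests, link) {
                    if (!rq->completed)
                            break;  /* requests are ordered oldest first */
                    retire(rq);
            }
    }
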
tl               3624 drivers/gpu/drm/omapdrm/dss/dsi.c 		int tl, t_he, width_bytes;
tl               3633 drivers/gpu/drm/omapdrm/dss/dsi.c 		tl = DIV_ROUND_UP(4, ndl) + (hsync_end ? hsa : 0) + t_he + hfp +
tl               3637 drivers/gpu/drm/omapdrm/dss/dsi.c 			hfp, hsync_end ? hsa : 0, tl);
tl               3656 drivers/gpu/drm/omapdrm/dss/dsi.c 		r = FLD_MOD(r, tl, 31, 16);		/* TL */
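
In the OMAP DSI driver, tl is the total line length programmed into the TL field (dsi.c:3656); the statement at dsi.c:3633 continues onto a following source line that this search does not show. Its leading term spreads a 4-byte quantity across the ndl data lanes with DIV_ROUND_UP, the kernel's ceiling-division macro:

    #include <stdio.h>

    #define DIV_ROUND_UP(n, d)      (((n) + (d) - 1) / (d))

    int main(void)
    {
            /* 4 bytes over 3 DSI lanes occupy 2 byte-clock cycles */
            printf("%d\n", DIV_ROUND_UP(4, 3));     /* -> 2 */
            return 0;
    }
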
tl                221 drivers/gpu/drm/radeon/r600_dpm.c int r600_calculate_at(u32 t, u32 h, u32 fh, u32 fl, u32 *tl, u32 *th)
tl                237 drivers/gpu/drm/radeon/r600_dpm.c 	*tl = t + al;
tl                142 drivers/gpu/drm/radeon/r600_dpm.h int r600_calculate_at(u32 t, u32 h, u32 fh, u32 fl, u32 *tl, u32 *th);
tl                777 drivers/ide/pmac.c 	u32 *timings, *timings2, tl[2];
tl                785 drivers/ide/pmac.c 	tl[0] = *timings;
tl                786 drivers/ide/pmac.c 	tl[1] = *timings2;
tl                790 drivers/ide/pmac.c 			ret = set_timings_udma_ata4(&tl[0], speed);
tl                793 drivers/ide/pmac.c 			ret = set_timings_udma_ata6(&tl[0], &tl[1], speed);
tl                795 drivers/ide/pmac.c 			ret = set_timings_udma_shasta(&tl[0], &tl[1], speed);
tl                799 drivers/ide/pmac.c 		set_timings_mdma(drive, pmif->kind, &tl[0], &tl[1], speed);
tl                805 drivers/ide/pmac.c 	*timings = tl[0];
tl                806 drivers/ide/pmac.c 	*timings2 = tl[1];
tl                167 drivers/input/keyboard/mpr121_touchkey.c 	unsigned char usl, lsl, tl, eleconf;
tl                199 drivers/input/keyboard/mpr121_touchkey.c 	tl = (usl * 90) / 100;
tl                202 drivers/input/keyboard/mpr121_touchkey.c 	ret |= i2c_smbus_write_byte_data(client, AUTO_CONFIG_TL_ADDR, tl);
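
In the MPR121 driver, tl is the auto-configuration target-level register value, 90% of the upper-side limit usl (mpr121_touchkey.c:199). The surrounding recipe follows the MPR121 datasheet's recommended levels (USL = 256 * (Vdd - 0.7V) / Vdd, LSL = 65% of USL); the usl/lsl formulas and the 3.3 V supply below are reconstructed from that recipe for illustration, not quoted from this listing:

    #include <stdio.h>

    int main(void)
    {
            unsigned int vdd = 3300;        /* supply in mV; illustrative */
            unsigned char usl, lsl, tl;

            usl = ((vdd - 700) * 256) / vdd;
            lsl = (usl * 65) / 100;         /* lower-side limit: 65% */
            tl  = (usl * 90) / 100;         /* target level: 90% */

            printf("usl=%u lsl=%u tl=%u\n", usl, lsl, tl);
            /* -> usl=201 lsl=130 tl=180 */
            return 0;
    }
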
tl                592 drivers/isdn/hardware/mISDN/mISDNipac.c 	u8 tl = 0;
tl                602 drivers/isdn/hardware/mISDN/mISDNipac.c 				tl |= 0x0c;
tl                604 drivers/isdn/hardware/mISDN/mISDNipac.c 				tl |= 0x3;
tl                606 drivers/isdn/hardware/mISDN/mISDNipac.c 			WriteISAC(isac, ISAC_SPCR, tl);
tl                607 drivers/isdn/hardware/mISDN/mISDNipac.c 			if (tl)
tl                154 drivers/isdn/mISDN/dsp.h 	struct timer_list tl;
tl                929 drivers/isdn/mISDN/dsp_core.c 		if (timer_pending(&dsp->tone.tl))
tl                930 drivers/isdn/mISDN/dsp_core.c 			del_timer(&dsp->tone.tl);
tl                977 drivers/isdn/mISDN/dsp_core.c 		if (timer_pending(&dsp->tone.tl))
tl                978 drivers/isdn/mISDN/dsp_core.c 			del_timer(&dsp->tone.tl);
tl               1093 drivers/isdn/mISDN/dsp_core.c 	timer_setup(&ndsp->tone.tl, dsp_tone_timeout, 0);
tl                462 drivers/isdn/mISDN/dsp_tones.c 	struct dsp *dsp = from_timer(dsp, t, tone.tl);
tl                481 drivers/isdn/mISDN/dsp_tones.c 	tone->tl.expires = jiffies + (pat->seq[index] * HZ) / 8000;
tl                482 drivers/isdn/mISDN/dsp_tones.c 	add_timer(&tone->tl);
tl                507 drivers/isdn/mISDN/dsp_tones.c 		if (dsp->features.hfc_loops && timer_pending(&tonet->tl))
tl                508 drivers/isdn/mISDN/dsp_tones.c 			del_timer(&tonet->tl);
tl                541 drivers/isdn/mISDN/dsp_tones.c 		if (timer_pending(&tonet->tl))
tl                542 drivers/isdn/mISDN/dsp_tones.c 			del_timer(&tonet->tl);
tl                543 drivers/isdn/mISDN/dsp_tones.c 		tonet->tl.expires = jiffies + (pat->seq[0] * HZ) / 8000;
tl                544 drivers/isdn/mISDN/dsp_tones.c 		add_timer(&tonet->tl);
tl                 98 drivers/isdn/mISDN/fsm.c 	struct FsmTimer *ft = from_timer(ft, t, tl);
tl                114 drivers/isdn/mISDN/fsm.c 	timer_setup(&ft->tl, FsmExpireTimer, 0);
tl                126 drivers/isdn/mISDN/fsm.c 	del_timer(&ft->tl);
tl                141 drivers/isdn/mISDN/fsm.c 	if (timer_pending(&ft->tl)) {
tl                152 drivers/isdn/mISDN/fsm.c 	ft->tl.expires = jiffies + (millisec * HZ) / 1000;
tl                153 drivers/isdn/mISDN/fsm.c 	add_timer(&ft->tl);
tl                169 drivers/isdn/mISDN/fsm.c 	if (timer_pending(&ft->tl))
tl                170 drivers/isdn/mISDN/fsm.c 		del_timer(&ft->tl);
tl                173 drivers/isdn/mISDN/fsm.c 	ft->tl.expires = jiffies + (millisec * HZ) / 1000;
tl                174 drivers/isdn/mISDN/fsm.c 	add_timer(&ft->tl);
tl                 44 drivers/isdn/mISDN/fsm.h 	struct timer_list tl;
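
The mISDN timers convert durations to jiffies by hand: milliseconds via expires = jiffies + (millisec * HZ) / 1000 (fsm.c:152), and 8 kHz tone-sample counts via jiffies + (pat->seq[index] * HZ) / 8000 (dsp_tones.c:481). A worked example, assuming HZ = 250 for illustration (msecs_to_jiffies() is the modern helper for the first case):

    #include <stdio.h>

    #define HZ 250  /* illustrative kernel tick rate */

    int main(void)
    {
            unsigned long millisec = 1000, samples = 4000;

            printf("%lu ms  -> %lu jiffies\n", millisec,
                   millisec * HZ / 1000);
            printf("%lu smp -> %lu jiffies\n", samples,
                   samples * HZ / 8000);
            /* -> 250 jiffies (1 s) and 125 jiffies (0.5 s of audio) */
            return 0;
    }
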
tl                 39 drivers/isdn/mISDN/timerdev.c 	struct timer_list	tl;
tl                 77 drivers/isdn/mISDN/timerdev.c 		del_timer_sync(&timer->tl);
tl                158 drivers/isdn/mISDN/timerdev.c 	struct mISDNtimer *timer = from_timer(timer, t, tl);
tl                183 drivers/isdn/mISDN/timerdev.c 		timer_setup(&timer->tl, dev_expire_timer, 0);
tl                189 drivers/isdn/mISDN/timerdev.c 		timer->tl.expires = jiffies + ((HZ * (u_long)timeout) / 1000);
tl                190 drivers/isdn/mISDN/timerdev.c 		add_timer(&timer->tl);
tl                207 drivers/isdn/mISDN/timerdev.c 			del_timer_sync(&timer->tl);
tl                 32 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	struct channel_tlv *tl =
tl                 35 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	tl->type = type;
tl                 36 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	tl->length = length;
tl                 52 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, &first_tlv->tl, 0, type, length);
tl                 63 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	   first_tlv->tl.type);
tl                256 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length,
tl                266 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 		      req->first_tlv.tl.length + sizeof(struct channel_tlv),
tl                413 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                464 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                515 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                662 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                697 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                751 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                810 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                907 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl                966 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl               1024 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_add_tlv(bp, req, req->first_tlv.tl.length, CHANNEL_TLV_LIST_END,
tl               1130 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	type = mbx->first_tlv.tl.type;
tl               1190 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	bnx2x_unlock_vf_pf_channel(bp, vf, mbx->first_tlv.tl.type);
tl               2123 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 	if (bnx2x_tlv_supported(mbx->first_tlv.tl.type)) {
tl               2127 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 		bnx2x_lock_vf_pf_channel(bp, vf, mbx->first_tlv.tl.type);
tl               2130 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 		switch (mbx->first_tlv.tl.type) {
tl               2168 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 			  mbx->first_tlv.tl.type, mbx->first_tlv.tl.length,
tl               2186 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.c 		bnx2x_unlock_vf_pf_channel(bp, vf, mbx->first_tlv.tl.type);
tl                 99 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.h 	struct channel_tlv tl;
tl                105 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.h 	struct channel_tlv tl;
tl                117 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.h 	struct channel_tlv tl;
tl                213 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.h 	struct channel_tlv tl;
tl                219 drivers/net/ethernet/broadcom/bnx2x/bnx2x_vfpf.h 	struct channel_tlv tl;
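
The bnx2x VF-PF mailbox frames every message as a chain of TLVs: bnx2x_add_tlv() stamps a struct channel_tlv (type + length) at a given offset (bnx2x_vfpf.c:32-36), each request begins with a first_tlv whose tl.length then serves as the offset of the next TLV, and the chain is closed with CHANNEL_TLV_LIST_END. The qed driver below uses the same scheme. A self-contained sketch of appending to such a chain; the struct layout mirrors the driver headers, the constants are illustrative:

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    struct channel_tlv {
            uint16_t type;
            uint16_t length;
    };

    enum { CHANNEL_TLV_ACQUIRE = 1, CHANNEL_TLV_LIST_END = 99 };

    static void add_tlv(void *msg, uint16_t offset, uint16_t type,
                        uint16_t length)
    {
            struct channel_tlv *tl =
                    (struct channel_tlv *)((uint8_t *)msg + offset);

            tl->type = type;
            tl->length = length;
    }

    int main(void)
    {
            uint8_t mbox[64];
            uint16_t req_len = 32;  /* length of the first request TLV */

            memset(mbox, 0, sizeof(mbox));
            add_tlv(mbox, 0, CHANNEL_TLV_ACQUIRE, req_len);
            /* terminator directly after the request, as the driver does */
            add_tlv(mbox, req_len, CHANNEL_TLV_LIST_END,
                    sizeof(struct channel_tlv));

            printf("first type=%u next type=%u\n",
                   ((struct channel_tlv *)mbox)->type,
                   ((struct channel_tlv *)(mbox + req_len))->type);
            return 0;
    }
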
tl                 58 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl tl;
tl                 64 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl tl;
tl                 70 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl tl;
tl                 78 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl tl;
tl                 87 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl tl;
tl                 92 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl tl;
tl                112 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c typedef int (*nfp_tlv_visit)(struct nfp_pf *pf, struct nfp_dump_tl *tl,
tl                120 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl *tl;
tl                125 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	while (remaining >= sizeof(*tl)) {
tl                126 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		tl = p;
tl                127 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		if (!tl->type && !tl->length)
tl                130 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		if (be32_to_cpu(tl->length) > remaining - sizeof(*tl))
tl                133 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		total_tlv_size = sizeof(*tl) + be32_to_cpu(tl->length);
tl                141 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		err = tlv_visit(pf, tl, param);
tl                216 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	u32 required_read_sz = sizeof(*spec_csr) - sizeof(spec_csr->tl);
tl                217 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	u32 available_sz = be32_to_cpu(spec_csr->tl.length);
tl                251 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c nfp_add_tlv_size(struct nfp_pf *pf, struct nfp_dump_tl *tl, void *param)
tl                257 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	switch (be32_to_cpu(tl->type)) {
tl                264 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		spec_csr = (struct nfp_dumpspec_csr *)tl;
tl                266 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 			*size += nfp_dump_error_tlv_size(tl);
tl                272 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		spec_csr = (struct nfp_dumpspec_csr *)tl;
tl                274 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 			*size += nfp_dump_error_tlv_size(tl);
tl                281 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		*size += nfp_calc_rtsym_dump_sz(pf, tl);
tl                288 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		*size += nfp_calc_hwinfo_field_sz(pf, tl);
tl                291 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		*size += nfp_dump_error_tlv_size(tl);
tl                331 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	struct nfp_dump_tl *tl = dump->p;
tl                339 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	tl->type = cpu_to_be32(type);
tl                340 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	tl->length = cpu_to_be32(total_tlv_sz - sizeof(*tl));
tl                461 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		return nfp_dump_error_tlv(&spec_csr->tl, -EINVAL, dump);
tl                469 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	err = nfp_add_tlv(be32_to_cpu(spec_csr->tl.type), total_size, dump);
tl                563 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		return nfp_dump_error_tlv(&spec_csr->tl, -EINVAL, dump);
tl                572 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	err = nfp_add_tlv(be32_to_cpu(spec_csr->tl.type), total_size, dump);
tl                610 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	tl_len = be32_to_cpu(spec->tl.length);
tl                613 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		return nfp_dump_error_tlv(&spec->tl, -EINVAL, dump);
tl                617 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		return nfp_dump_error_tlv(&spec->tl, -ENOENT, dump);
tl                625 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	err = nfp_add_tlv(be32_to_cpu(spec->tl.type), total_size, dump);
tl                654 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c nfp_dump_for_tlv(struct nfp_pf *pf, struct nfp_dump_tl *tl, void *param)
tl                661 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 	switch (be32_to_cpu(tl->type)) {
tl                670 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		spec_csr = (struct nfp_dumpspec_csr *)tl;
tl                676 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		spec_csr = (struct nfp_dumpspec_csr *)tl;
tl                682 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		spec_rtsym = (struct nfp_dumpspec_rtsym *)tl;
tl                688 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		err = nfp_dump_hwinfo(pf, tl, dump);
tl                693 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		err = nfp_dump_hwinfo_field(pf, tl, dump);
tl                698 drivers/net/ethernet/netronome/nfp/nfp_net_debugdump.c 		err = nfp_dump_error_tlv(tl, -EOPNOTSUPP, dump);
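
The nfp debugdump code walks a big-endian TLV stream: a TLV with zero type and zero length terminates the stream, and each length is bounds-checked against the remaining buffer before the visitor callback runs (nfp_net_debugdump.c:125-141). A standalone walker following the same rules:

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>
    #include <arpa/inet.h>  /* ntohl/htonl stand in for be32_to_cpu */

    struct dump_tl {
            uint32_t type;          /* big-endian on the wire */
            uint32_t length;        /* big-endian payload bytes after header */
    };

    static int walk_tlvs(const uint8_t *p, size_t remaining,
                         int (*visit)(const struct dump_tl *tl))
    {
            while (remaining >= sizeof(struct dump_tl)) {
                    const struct dump_tl *tl = (const struct dump_tl *)p;
                    uint32_t len = ntohl(tl->length);

                    if (!tl->type && !tl->length)
                            break;                  /* terminator */
                    if (len > remaining - sizeof(*tl))
                            return -1;              /* truncated TLV */
                    if (visit(tl))
                            return -1;
                    p += sizeof(*tl) + len;
                    remaining -= sizeof(*tl) + len;
            }
            return 0;
    }

    static int print_tlv(const struct dump_tl *tl)
    {
            printf("type=%u len=%u\n", ntohl(tl->type), ntohl(tl->length));
            return 0;
    }

    int main(void)
    {
            uint8_t buf[32] = { 0 };
            struct dump_tl tl = { htonl(7), htonl(4) };

            memcpy(buf, &tl, sizeof(tl));   /* one TLV, 4 payload bytes */
            return walk_tlvs(buf, sizeof(buf), print_tlv);
    }
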
tl               1167 drivers/net/ethernet/qlogic/qed/qed_sriov.c 	struct channel_tlv *tl = (struct channel_tlv *)*offset;
tl               1169 drivers/net/ethernet/qlogic/qed/qed_sriov.c 	tl->type = type;
tl               1170 drivers/net/ethernet/qlogic/qed/qed_sriov.c 	tl->length = length;
tl               3886 drivers/net/ethernet/qlogic/qed/qed_sriov.c 		   p_vf->abs_vf_id, mbx->first_tlv.tl.type);
tl               3889 drivers/net/ethernet/qlogic/qed/qed_sriov.c 	if (qed_iov_tlv_supported(mbx->first_tlv.tl.type) &&
tl               3891 drivers/net/ethernet/qlogic/qed/qed_sriov.c 		switch (mbx->first_tlv.tl.type) {
tl               3944 drivers/net/ethernet/qlogic/qed/qed_sriov.c 	} else if (qed_iov_tlv_supported(mbx->first_tlv.tl.type)) {
tl               3947 drivers/net/ethernet/qlogic/qed/qed_sriov.c 			   p_vf->abs_vf_id, mbx->first_tlv.tl.type);
tl               3950 drivers/net/ethernet/qlogic/qed/qed_sriov.c 				     mbx->first_tlv.tl.type,
tl               3963 drivers/net/ethernet/qlogic/qed/qed_sriov.c 			  mbx->first_tlv.tl.type,
tl               3964 drivers/net/ethernet/qlogic/qed/qed_sriov.c 			  mbx->first_tlv.tl.length,
tl               3974 drivers/net/ethernet/qlogic/qed/qed_sriov.c 					     mbx->first_tlv.tl.type,
tl                139 drivers/net/ethernet/qlogic/qed/qed_vf.c 			  p_req->first_tlv.tl.type);
tl                146 drivers/net/ethernet/qlogic/qed/qed_vf.c 				  *done, p_req->first_tlv.tl.type);
tl                150 drivers/net/ethernet/qlogic/qed/qed_vf.c 				   *done, p_req->first_tlv.tl.type);
tl                 82 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                 89 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                101 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                150 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                259 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                372 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                380 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                386 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                392 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                404 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                412 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                420 drivers/net/ethernet/qlogic/qed/qed_vf.h 	struct channel_tlv tl;
tl                127 drivers/net/usb/rtl8150.c 	struct tasklet_struct tl;
tl                453 drivers/net/usb/rtl8150.c 	tasklet_schedule(&dev->tl);
tl                625 drivers/net/usb/rtl8150.c 	tasklet_schedule(&dev->tl);
tl                893 drivers/net/usb/rtl8150.c 	tasklet_init(&dev->tl, rx_fixup, (unsigned long)dev);
tl                943 drivers/net/usb/rtl8150.c 		tasklet_kill(&dev->tl);
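
The rtl8150 lines trace a complete tasklet lifecycle: tasklet_init() once at setup, tasklet_schedule() from the hot completion path, tasklet_kill() at teardown. A kernel-context sketch of the same pattern using the legacy unsigned-long-data API this driver uses (newer kernels prefer tasklet_setup()); the my_* names are hypothetical:

#include <linux/interrupt.h>

struct mydev {
        struct tasklet_struct tl;
};

static void my_rx_fixup(unsigned long data)
{
        struct mydev *dev = (struct mydev *)data;

        /* softirq-context deferred RX processing would go here */
        (void)dev;
}

static void my_setup(struct mydev *dev)
{
        /* legacy API, as in rtl8150.c; see also tasklet_setup() */
        tasklet_init(&dev->tl, my_rx_fixup, (unsigned long)dev);
}

static void my_completion(struct mydev *dev)
{
        tasklet_schedule(&dev->tl);     /* cheap; repeat calls coalesce */
}

static void my_teardown(struct mydev *dev)
{
        tasklet_kill(&dev->tl);         /* waits out a running instance */
}
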
tl                235 drivers/net/wireless/intel/iwlegacy/4965-rs.c il4965_rs_tl_rm_old_stats(struct il_traffic_load *tl, u32 curr_time)
tl                240 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	while (tl->queue_count && tl->time_stamp < oldest_time) {
tl                241 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->total -= tl->packet_count[tl->head];
tl                242 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->packet_count[tl->head] = 0;
tl                243 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->time_stamp += TID_QUEUE_CELL_SPACING;
tl                244 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->queue_count--;
tl                245 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->head++;
tl                246 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		if (tl->head >= TID_QUEUE_MAX_SIZE)
tl                247 drivers/net/wireless/intel/iwlegacy/4965-rs.c 			tl->head = 0;
tl                261 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	struct il_traffic_load *tl = NULL;
tl                273 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	tl = &lq_data->load[tid];
tl                278 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	if (!(tl->queue_count)) {
tl                279 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->total = 1;
tl                280 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->time_stamp = curr_time;
tl                281 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->queue_count = 1;
tl                282 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->head = 0;
tl                283 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->packet_count[0] = 1;
tl                287 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	time_diff = TIME_WRAP_AROUND(tl->time_stamp, curr_time);
tl                293 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		il4965_rs_tl_rm_old_stats(tl, curr_time);
tl                295 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	idx = (tl->head + idx) % TID_QUEUE_MAX_SIZE;
tl                296 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	tl->packet_count[idx] = tl->packet_count[idx] + 1;
tl                297 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	tl->total = tl->total + 1;
tl                299 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	if ((idx + 1) > tl->queue_count)
tl                300 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		tl->queue_count = idx + 1;
tl                314 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	struct il_traffic_load *tl = NULL;
tl                319 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	tl = &(lq_data->load[tid]);
tl                323 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	if (!(tl->queue_count))
tl                326 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	time_diff = TIME_WRAP_AROUND(tl->time_stamp, curr_time);
tl                332 drivers/net/wireless/intel/iwlegacy/4965-rs.c 		il4965_rs_tl_rm_old_stats(tl, curr_time);
tl                334 drivers/net/wireless/intel/iwlegacy/4965-rs.c 	return tl->total;
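
il4965_rs_tl_add_packet() and il4965_rs_tl_rm_old_stats() above implement a time-bucketed ring buffer: each cell counts packets in one fixed time slice, stale cells are drained from the head, and tl->total tracks the sum of live cells. A standalone sketch of that structure, with illustrative constants and without the driver's TIME_WRAP_AROUND() handling:

#include <stdint.h>
#include <stdio.h>

#define CELL_SPACING 500u               /* ms of traffic per ring cell */
#define MAX_CELLS    20u

struct traffic_load {
        uint32_t packet_count[MAX_CELLS];
        uint32_t total;                 /* sum over all live cells */
        uint32_t time_stamp;            /* timestamp of the head cell */
        uint32_t head;
        uint32_t queue_count;
};

/* drop cells that have aged out of the window (cf. rs_tl_rm_old_stats) */
static void rm_old_stats(struct traffic_load *tl, uint32_t now)
{
        uint32_t oldest = now - (MAX_CELLS - 1) * CELL_SPACING;

        while (tl->queue_count && tl->time_stamp < oldest) {
                tl->total -= tl->packet_count[tl->head];
                tl->packet_count[tl->head] = 0;
                tl->time_stamp += CELL_SPACING;
                tl->queue_count--;
                if (++tl->head >= MAX_CELLS)
                        tl->head = 0;
        }
}

static void add_packet(struct traffic_load *tl, uint32_t now)
{
        uint32_t idx = 0;

        if (tl->queue_count) {
                idx = (now - tl->time_stamp) / CELL_SPACING;
                if (idx >= MAX_CELLS) { /* window overflowed: expire cells */
                        rm_old_stats(tl, now);
                        idx = (now - tl->time_stamp) / CELL_SPACING;
                }
        }

        if (!tl->queue_count) {         /* empty (or fully expired) window */
                tl->total = 1;
                tl->time_stamp = now;
                tl->queue_count = 1;
                tl->head = 0;
                tl->packet_count[0] = 1;
                return;
        }

        tl->packet_count[(tl->head + idx) % MAX_CELLS]++;
        tl->total++;
        if (idx + 1 > tl->queue_count)
                tl->queue_count = idx + 1;
}

int main(void)
{
        struct traffic_load tl = { { 0 } };
        uint32_t t;

        for (t = 0; t < 15000; t += 100)
                add_packet(&tl, t);
        printf("packets inside the window: %u\n", tl.total);
        return 0;
}

The iwlwifi/dvm/rs.c block that follows is the same algorithm carried into the newer driver, so the sketch applies to it as well.
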
tl                248 drivers/net/wireless/intel/iwlwifi/dvm/rs.c static void rs_tl_rm_old_stats(struct iwl_traffic_load *tl, u32 curr_time)
tl                253 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	while (tl->queue_count &&
tl                254 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	       (tl->time_stamp < oldest_time)) {
tl                255 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->total -= tl->packet_count[tl->head];
tl                256 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->packet_count[tl->head] = 0;
tl                257 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->time_stamp += TID_QUEUE_CELL_SPACING;
tl                258 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->queue_count--;
tl                259 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->head++;
tl                260 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		if (tl->head >= TID_QUEUE_MAX_SIZE)
tl                261 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 			tl->head = 0;
tl                275 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_traffic_load *tl = NULL;
tl                287 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	tl = &lq_data->load[tid];
tl                292 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	if (!(tl->queue_count)) {
tl                293 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->total = 1;
tl                294 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->time_stamp = curr_time;
tl                295 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->queue_count = 1;
tl                296 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->head = 0;
tl                297 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->packet_count[0] = 1;
tl                301 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	time_diff = TIME_WRAP_AROUND(tl->time_stamp, curr_time);
tl                307 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		rs_tl_rm_old_stats(tl, curr_time);
tl                309 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	index = (tl->head + index) % TID_QUEUE_MAX_SIZE;
tl                310 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	tl->packet_count[index] = tl->packet_count[index] + 1;
tl                311 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	tl->total = tl->total + 1;
tl                313 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	if ((index + 1) > tl->queue_count)
tl                314 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		tl->queue_count = index + 1;
tl                357 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	struct iwl_traffic_load *tl = NULL;
tl                362 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	tl = &(lq_data->load[tid]);
tl                366 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	if (!(tl->queue_count))
tl                369 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 	time_diff = TIME_WRAP_AROUND(tl->time_stamp, curr_time);
tl                375 drivers/net/wireless/intel/iwlwifi/dvm/rs.c 		rs_tl_rm_old_stats(tl, curr_time);
tl                  9 drivers/net/wireless/mediatek/mt76/mt76x02_dfs.c 		   w_tolerance, tl, th, t_tolerance,	\
tl                 19 drivers/net/wireless/mediatek/mt76/mt76x02_dfs.c 	.t_low = tl,					\
tl                135 drivers/s390/net/fsm.c 	fsm_timer *this = from_timer(this, t, tl);
tl                151 drivers/s390/net/fsm.c 	timer_setup(&this->tl, fsm_expire_timer, 0);
tl                161 drivers/s390/net/fsm.c 	del_timer(&this->tl);
tl                173 drivers/s390/net/fsm.c 	timer_setup(&this->tl, fsm_expire_timer, 0);
tl                176 drivers/s390/net/fsm.c 	this->tl.expires = jiffies + (millisec * HZ) / 1000;
tl                177 drivers/s390/net/fsm.c 	add_timer(&this->tl);
tl                191 drivers/s390/net/fsm.c 	del_timer(&this->tl);
tl                192 drivers/s390/net/fsm.c 	timer_setup(&this->tl, fsm_expire_timer, 0);
tl                195 drivers/s390/net/fsm.c 	this->tl.expires = jiffies + (millisec * HZ) / 1000;
tl                196 drivers/s390/net/fsm.c 	add_timer(&this->tl);
tl                 92 drivers/s390/net/fsm.h 	struct timer_list tl;
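
The s390 fsm lines show the embedded-timer pattern: a timer_list inside the owning object, from_timer() (a container_of() wrapper) to recover the owner in the callback, and a millisecond timeout converted to jiffies before arming. A kernel-context sketch; names outside the timer API are illustrative, and msecs_to_jiffies() stands in for the open-coded (millisec * HZ) / 1000:

#include <linux/jiffies.h>
#include <linux/timer.h>

struct my_fsm_timer {
        struct timer_list tl;
        int expire_event;
};

static void my_expire(struct timer_list *t)
{
        struct my_fsm_timer *this = from_timer(this, t, tl);

        /* deliver this->expire_event to the state machine here */
        (void)this;
}

static void my_addtimer(struct my_fsm_timer *this, int millisec, int event)
{
        timer_setup(&this->tl, my_expire, 0);
        this->expire_event = event;
        this->tl.expires = jiffies + msecs_to_jiffies(millisec);
        add_timer(&this->tl);
}

static void my_deltimer(struct my_fsm_timer *this)
{
        del_timer(&this->tl);           /* harmless if already expired */
}
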
tl               2219 drivers/scsi/isci/host.c 					    &ihost->scu_registers->peg0.pe[i].tl,
tl                383 drivers/scsi/isci/phy.c 				   struct scu_transport_layer_registers __iomem *tl,
tl                387 drivers/scsi/isci/phy.c 	sci_phy_transport_layer_initialization(iphy, tl);
tl               1807 drivers/scsi/isci/registers.h 	struct scu_transport_layer_registers tl;
tl               1990 drivers/staging/rtl8192u/ieee80211/ieee80211.h 	void (*enter_sleep_state)(struct net_device *dev, u32 th, u32 tl);
tl               1688 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c 	u32 th, tl;
tl               1706 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c 	sleep = ieee80211_sta_ps_sleep(ieee, &th, &tl);
tl               1713 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c 			ieee->enter_sleep_state(ieee->dev, th, tl);
tl               1726 drivers/staging/rtl8192u/ieee80211/ieee80211_softmac.c 				ieee->ps_tl = tl;
tl               3700 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		int tl, t_he, width_bytes;
tl               3709 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		tl = DIV_ROUND_UP(4, ndl) + (hsync_end ? hsa : 0) + t_he + hfp +
tl               3713 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 			hfp, hsync_end ? hsa : 0, tl);
tl               3732 drivers/video/fbdev/omap2/omapfb/dss/dsi.c 		r = FLD_MOD(r, tl, 31, 16);		/* TL */
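
The DSI code computes a line time tl and then inserts it into bits 31..16 of a timing register with FLD_MOD(). A standalone sketch of the read-modify-write field insertion that call performs; this generic helper is an assumption about the macro's shape, not the OMAP DSS definition:

#include <stdint.h>
#include <stdio.h>

/* mask covering bit positions start..end, start > end, field < 32 bits */
static uint32_t fld_mask(int start, int end)
{
        return ((1u << (start - end + 1)) - 1) << end;
}

static uint32_t fld_mod(uint32_t orig, uint32_t val, int start, int end)
{
        uint32_t mask = fld_mask(start, end);

        return (orig & ~mask) | ((val << end) & mask);
}

int main(void)
{
        uint32_t r = 0x0000ffff;

        r = fld_mod(r, 1024, 31, 16);   /* TL field, bits 31..16 */
        printf("0x%08x\n", r);          /* prints 0x0400ffff */
        return 0;
}
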
tl                129 fs/cifs/cifssmb.c 	struct dfs_cache_tgt_list tl;
tl                153 fs/cifs/cifssmb.c 	rc = dfs_cache_noreq_find(tcon->dfs_path + 1, NULL, &tl);
tl                160 fs/cifs/cifssmb.c 	for (it = dfs_cache_get_tgt_iterator(&tl); it;
tl                161 fs/cifs/cifssmb.c 	     it = dfs_cache_get_next_tgt(&tl, it)) {
tl                191 fs/cifs/cifssmb.c 	dfs_cache_free_tgts(&tl);
tl                472 fs/cifs/connect.c 					   struct dfs_cache_tgt_list *tl,
tl                477 fs/cifs/connect.c 	return dfs_cache_noreq_find(cifs_sb->origin_fullpath + 1, NULL, tl);
tl                761 fs/cifs/dfs_cache.c 			struct dfs_cache_tgt_list *tl)
tl                764 fs/cifs/dfs_cache.c 	struct list_head *head = &tl->tl_list;
tl                768 fs/cifs/dfs_cache.c 	memset(tl, 0, sizeof(*tl));
tl                791 fs/cifs/dfs_cache.c 	tl->tl_numtgts = ce->ce_numtgts;
tl                 53 fs/cifs/dfs_cache.h dfs_cache_get_next_tgt(struct dfs_cache_tgt_list *tl,
tl                 56 fs/cifs/dfs_cache.h 	if (!tl || list_empty(&tl->tl_list) || !it ||
tl                 57 fs/cifs/dfs_cache.h 	    list_is_last(&it->it_list, &tl->tl_list))
tl                 63 fs/cifs/dfs_cache.h dfs_cache_get_tgt_iterator(struct dfs_cache_tgt_list *tl)
tl                 65 fs/cifs/dfs_cache.h 	if (!tl)
tl                 67 fs/cifs/dfs_cache.h 	return list_first_entry_or_null(&tl->tl_list,
tl                 72 fs/cifs/dfs_cache.h static inline void dfs_cache_free_tgts(struct dfs_cache_tgt_list *tl)
tl                 76 fs/cifs/dfs_cache.h 	if (!tl || list_empty(&tl->tl_list))
tl                 78 fs/cifs/dfs_cache.h 	list_for_each_entry_safe(it, nit, &tl->tl_list, it_list) {
tl                 83 fs/cifs/dfs_cache.h 	tl->tl_numtgts = 0;
tl                 93 fs/cifs/dfs_cache.h dfs_cache_get_nr_tgts(const struct dfs_cache_tgt_list *tl)
tl                 95 fs/cifs/dfs_cache.h 	return tl ? tl->tl_numtgts : 0;
tl                163 fs/cifs/smb2pdu.c 	struct dfs_cache_tgt_list tl;
tl                187 fs/cifs/smb2pdu.c 	rc = dfs_cache_noreq_find(tcon->dfs_path + 1, NULL, &tl);
tl                194 fs/cifs/smb2pdu.c 	for (it = dfs_cache_get_tgt_iterator(&tl); it;
tl                195 fs/cifs/smb2pdu.c 	     it = dfs_cache_get_next_tgt(&tl, it)) {
tl                225 fs/cifs/smb2pdu.c 	dfs_cache_free_tgts(&tl);
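
Both cifssmb.c and smb2pdu.c above follow the same consumer pattern for the inline helpers in dfs_cache.h: populate the target list once, walk it with the iterator pair until a target works, and free it on every exit path. A kernel-context sketch; reconnect_target() is a hypothetical stand-in for the tcon/session plumbing:

#include <linux/errno.h>

#include "dfs_cache.h"

static int reconnect_target(struct dfs_cache_tgt_iterator *it)
{
        return 0;                       /* stand-in: first target works */
}

static int try_dfs_targets(const char *dfs_path)
{
        struct dfs_cache_tgt_list tl;
        struct dfs_cache_tgt_iterator *it;
        int rc;

        rc = dfs_cache_noreq_find(dfs_path, NULL, &tl);
        if (rc)
                return rc;

        rc = -EHOSTDOWN;
        for (it = dfs_cache_get_tgt_iterator(&tl); it;
             it = dfs_cache_get_next_tgt(&tl, it)) {
                if (!reconnect_target(it)) {    /* stop at first success */
                        rc = 0;
                        break;
                }
        }

        dfs_cache_free_tgts(&tl);       /* frees every it_list entry */
        return rc;
}
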
tl               5810 fs/ocfs2/alloc.c 	struct ocfs2_truncate_log *tl;
tl               5813 fs/ocfs2/alloc.c 	tl = &di->id2.i_dealloc;
tl               5815 fs/ocfs2/alloc.c 	mlog_bug_on_msg(le16_to_cpu(tl->tl_used) > le16_to_cpu(tl->tl_count),
tl               5818 fs/ocfs2/alloc.c 			le16_to_cpu(tl->tl_used), le16_to_cpu(tl->tl_count));
tl               5819 fs/ocfs2/alloc.c 	return le16_to_cpu(tl->tl_used) == le16_to_cpu(tl->tl_count);
tl               5822 fs/ocfs2/alloc.c static int ocfs2_truncate_log_can_coalesce(struct ocfs2_truncate_log *tl,
tl               5829 fs/ocfs2/alloc.c 	if (!le16_to_cpu(tl->tl_used))
tl               5832 fs/ocfs2/alloc.c 	tail_index = le16_to_cpu(tl->tl_used) - 1;
tl               5833 fs/ocfs2/alloc.c 	current_tail = le32_to_cpu(tl->tl_recs[tail_index].t_start);
tl               5834 fs/ocfs2/alloc.c 	current_tail += le32_to_cpu(tl->tl_recs[tail_index].t_clusters);
tl               5849 fs/ocfs2/alloc.c 	struct ocfs2_truncate_log *tl;
tl               5862 fs/ocfs2/alloc.c 	tl = &di->id2.i_dealloc;
tl               5863 fs/ocfs2/alloc.c 	tl_count = le16_to_cpu(tl->tl_count);
tl               5870 fs/ocfs2/alloc.c 			le16_to_cpu(tl->tl_count));
tl               5873 fs/ocfs2/alloc.c 	index = le16_to_cpu(tl->tl_used);
tl               5890 fs/ocfs2/alloc.c 	if (ocfs2_truncate_log_can_coalesce(tl, start_cluster)) {
tl               5897 fs/ocfs2/alloc.c 		num_clusters += le32_to_cpu(tl->tl_recs[index].t_clusters);
tl               5900 fs/ocfs2/alloc.c 			index, le32_to_cpu(tl->tl_recs[index].t_start),
tl               5903 fs/ocfs2/alloc.c 		tl->tl_recs[index].t_start = cpu_to_le32(start_cluster);
tl               5904 fs/ocfs2/alloc.c 		tl->tl_used = cpu_to_le16(index + 1);
tl               5906 fs/ocfs2/alloc.c 	tl->tl_recs[index].t_clusters = cpu_to_le32(num_clusters);
tl               5925 fs/ocfs2/alloc.c 	struct ocfs2_truncate_log *tl;
tl               5931 fs/ocfs2/alloc.c 	tl = &di->id2.i_dealloc;
tl               5932 fs/ocfs2/alloc.c 	i = le16_to_cpu(tl->tl_used) - 1;
tl               5950 fs/ocfs2/alloc.c 		tl->tl_used = cpu_to_le16(i);
tl               5954 fs/ocfs2/alloc.c 		rec = tl->tl_recs[i];
tl               5995 fs/ocfs2/alloc.c 	struct ocfs2_truncate_log *tl;
tl               6007 fs/ocfs2/alloc.c 	tl = &di->id2.i_dealloc;
tl               6008 fs/ocfs2/alloc.c 	num_to_flush = le16_to_cpu(tl->tl_used);
tl               6186 fs/ocfs2/alloc.c 	struct ocfs2_truncate_log *tl;
tl               6205 fs/ocfs2/alloc.c 	tl = &di->id2.i_dealloc;
tl               6206 fs/ocfs2/alloc.c 	if (le16_to_cpu(tl->tl_used)) {
tl               6207 fs/ocfs2/alloc.c 		trace_ocfs2_truncate_log_recovery_num(le16_to_cpu(tl->tl_used));
tl               6222 fs/ocfs2/alloc.c 		tl->tl_used = 0;
tl               6254 fs/ocfs2/alloc.c 	struct ocfs2_truncate_log *tl;
tl               6261 fs/ocfs2/alloc.c 	tl = &tl_copy->id2.i_dealloc;
tl               6262 fs/ocfs2/alloc.c 	num_recs = le16_to_cpu(tl->tl_used);
tl               6284 fs/ocfs2/alloc.c 		clusters = le32_to_cpu(tl->tl_recs[i].t_clusters);
tl               6285 fs/ocfs2/alloc.c 		start_cluster = le32_to_cpu(tl->tl_recs[i].t_start);
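
ocfs2_truncate_log_append() above merges a new (start, clusters) record into the tail record when the new range begins exactly where the tail ends (ocfs2_truncate_log_can_coalesce()), and otherwise takes the next slot. A standalone sketch of that append, with endianness conversion and journalling omitted:

#include <stdint.h>
#include <stdio.h>

#define TL_COUNT 16

struct trunc_rec { uint32_t t_start, t_clusters; };

struct trunc_log {
        uint16_t tl_used, tl_count;
        struct trunc_rec tl_recs[TL_COUNT];
};

static int can_coalesce(const struct trunc_log *tl, uint32_t start)
{
        uint16_t tail;

        if (!tl->tl_used)
                return 0;
        tail = tl->tl_used - 1;
        return tl->tl_recs[tail].t_start + tl->tl_recs[tail].t_clusters ==
               start;
}

static int tl_append(struct trunc_log *tl, uint32_t start, uint32_t clusters)
{
        uint16_t index = tl->tl_used;

        if (can_coalesce(tl, start)) {
                index--;                /* extend the tail record in place */
                clusters += tl->tl_recs[index].t_clusters;
        } else if (index == tl->tl_count) {
                return -1;              /* log is full: caller must flush */
        } else {
                tl->tl_recs[index].t_start = start;
                tl->tl_used = index + 1;
        }
        tl->tl_recs[index].t_clusters = clusters;
        return 0;
}

int main(void)
{
        struct trunc_log tl = { .tl_count = TL_COUNT };

        tl_append(&tl, 100, 8);
        tl_append(&tl, 108, 4);         /* coalesces with the first record */
        tl_append(&tl, 200, 2);
        printf("used=%u rec0=%u+%u\n", tl.tl_used,
               tl.tl_recs[0].t_start, tl.tl_recs[0].t_clusters);
        return 0;                       /* prints used=2 rec0=100+12 */
}
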
tl                 52 include/linux/ipmi-fru.h 	struct fru_type_length tl[0];	/* type-length stuff follows */
tl                 85 include/linux/ipmi-fru.h static inline int fru_type(struct fru_type_length *tl)
tl                 87 include/linux/ipmi-fru.h 	return tl->type_length & 0xc0;
tl                 90 include/linux/ipmi-fru.h static inline int fru_length(struct fru_type_length *tl)
tl                 92 include/linux/ipmi-fru.h 	return (tl->type_length & 0x3f) + 1; /* len of whole record */
tl                 96 include/linux/ipmi-fru.h static inline int fru_strlen(struct fru_type_length *tl)
tl                 98 include/linux/ipmi-fru.h 	return fru_length(tl) - 1;
tl                101 include/linux/ipmi-fru.h static inline char *fru_strcpy(char *dest, struct fru_type_length *tl)
tl                103 include/linux/ipmi-fru.h 	int len = fru_strlen(tl);
tl                104 include/linux/ipmi-fru.h 	memcpy(dest, tl->data, len);
tl                109 include/linux/ipmi-fru.h static inline struct fru_type_length *fru_next_tl(struct fru_type_length *tl)
tl                111 include/linux/ipmi-fru.h 	return tl + fru_length(tl);
tl                114 include/linux/ipmi-fru.h static inline int fru_is_eof(struct fru_type_length *tl)
tl                116 include/linux/ipmi-fru.h 	return tl->type_length == 0xc1;
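
The ipmi-fru.h inlines above define a one-byte type/length encoding: the top two bits carry the type, the low six bits the data length, and 0xc1 marks end-of-area. A standalone sketch of walking such records; the sample area contents are made up:

#include <stdio.h>

struct fru_tl {
        unsigned char type_length;
        char data[];
};

static int fru_length(const struct fru_tl *tl)
{
        return (tl->type_length & 0x3f) + 1;    /* whole record incl. header */
}

static const struct fru_tl *fru_next(const struct fru_tl *tl)
{
        return (const struct fru_tl *)((const char *)tl + fru_length(tl));
}

int main(void)
{
        /* two ASCII records ("ab", "xyz") followed by the 0xc1 end marker */
        static const unsigned char area[] = {
                0xc0 | 2, 'a', 'b',
                0xc0 | 3, 'x', 'y', 'z',
                0xc1,
        };
        const struct fru_tl *tl = (const struct fru_tl *)area;

        while (tl->type_length != 0xc1) {
                printf("%.*s\n", fru_length(tl) - 1, tl->data);
                tl = fru_next(tl);
        }
        return 0;
}
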
tl                189 include/linux/sched/topology.h extern void set_sched_topology(struct sched_domain_topology_level *tl);
tl               1315 kernel/sched/topology.c sd_init(struct sched_domain_topology_level *tl,
tl               1319 kernel/sched/topology.c 	struct sd_data *sdd = &tl->data;
tl               1327 kernel/sched/topology.c 	sched_domains_curr_level = tl->numa_level;
tl               1330 kernel/sched/topology.c 	sd_weight = cpumask_weight(tl->mask(cpu));
tl               1332 kernel/sched/topology.c 	if (tl->sd_flags)
tl               1333 kernel/sched/topology.c 		sd_flags = (*tl->sd_flags)();
tl               1369 kernel/sched/topology.c 		.name			= tl->name,
tl               1373 kernel/sched/topology.c 	cpumask_and(sched_domain_span(sd), cpu_map, tl->mask(cpu));
tl               1406 kernel/sched/topology.c 		if (sched_domains_numa_distance[tl->numa_level] > node_reclaim_distance) {
tl               1449 kernel/sched/topology.c #define for_each_sd_topology(tl)			\
tl               1450 kernel/sched/topology.c 	for (tl = sched_domain_topology; tl->mask; tl++)
tl               1452 kernel/sched/topology.c void set_sched_topology(struct sched_domain_topology_level *tl)
tl               1457 kernel/sched/topology.c 	sched_domain_topology = tl;
tl               1558 kernel/sched/topology.c 	struct sched_domain_topology_level *tl;
tl               1664 kernel/sched/topology.c 	tl = kzalloc((i + level + 1) *
tl               1666 kernel/sched/topology.c 	if (!tl)
tl               1673 kernel/sched/topology.c 		tl[i] = sched_domain_topology[i];
tl               1678 kernel/sched/topology.c 	tl[i++] = (struct sched_domain_topology_level){
tl               1688 kernel/sched/topology.c 		tl[i] = (struct sched_domain_topology_level){
tl               1697 kernel/sched/topology.c 	sched_domain_topology = tl;
tl               1752 kernel/sched/topology.c 	struct sched_domain_topology_level *tl;
tl               1755 kernel/sched/topology.c 	for_each_sd_topology(tl) {
tl               1756 kernel/sched/topology.c 		struct sd_data *sdd = &tl->data;
tl               1821 kernel/sched/topology.c 	struct sched_domain_topology_level *tl;
tl               1824 kernel/sched/topology.c 	for_each_sd_topology(tl) {
tl               1825 kernel/sched/topology.c 		struct sd_data *sdd = &tl->data;
tl               1855 kernel/sched/topology.c static struct sched_domain *build_sched_domain(struct sched_domain_topology_level *tl,
tl               1859 kernel/sched/topology.c 	struct sched_domain *sd = sd_init(tl, cpu_map, child, dflags, cpu);
tl               1889 kernel/sched/topology.c static bool topology_span_sane(struct sched_domain_topology_level *tl,
tl               1895 kernel/sched/topology.c 	if (tl->flags & SDTL_OVERLAP)
tl               1913 kernel/sched/topology.c 		if (!cpumask_equal(tl->mask(cpu), tl->mask(i)) &&
tl               1914 kernel/sched/topology.c 		    cpumask_intersects(tl->mask(cpu), tl->mask(i)))
tl               1930 kernel/sched/topology.c 	struct sched_domain_topology_level *tl, *asym_tl = NULL;
tl               1955 kernel/sched/topology.c 		for_each_sd_topology(tl) {
tl               1959 kernel/sched/topology.c 			for_each_cpu_and(j, tl->mask(i), cpu_map) {
tl               1969 kernel/sched/topology.c 				asym_tl = tl;
tl               2006 kernel/sched/topology.c 		struct sched_domain_topology_level *tl;
tl               2009 kernel/sched/topology.c 		for_each_sd_topology(tl) {
tl               2012 kernel/sched/topology.c 			if (tl == tl_asym) {
tl               2017 kernel/sched/topology.c 			if (WARN_ON(!topology_span_sane(tl, cpu_map, i)))
tl               2020 kernel/sched/topology.c 			sd = build_sched_domain(tl, cpu_map, attr, sd, dflags, i);
tl               2022 kernel/sched/topology.c 			if (tl == sched_domain_topology)
tl               2024 kernel/sched/topology.c 			if (tl->flags & SDTL_OVERLAP)
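
set_sched_topology() replaces the NULL-mask-terminated level array that for_each_sd_topology() walks. A kernel-context sketch of how an architecture supplies its own levels, mirroring the shape used by x86 and arm64 arch code; treat it as illustrative rather than a drop-in:

#include <linux/init.h>
#include <linux/sched/topology.h>

static struct sched_domain_topology_level my_topology[] = {
#ifdef CONFIG_SCHED_SMT
        { cpu_smt_mask, cpu_smt_flags, SD_INIT_NAME(SMT) },
#endif
        { cpu_cpu_mask, SD_INIT_NAME(DIE) },
        { NULL, },                      /* for_each_sd_topology() stops here */
};

static void __init my_smp_prepare(void)
{
        set_sched_topology(my_topology);
}
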
tl                590 lib/inflate.c  	struct huft *tl,    /* literal/length decoder tables */
tl                618 lib/inflate.c      if ((e = (t = tl + ((unsigned)b & ml))->e) > 16)
tl                771 lib/inflate.c    struct huft *tl;      /* literal/length code table */
tl                793 lib/inflate.c    if ((i = huft_build(l, 288, 257, cplens, cplext, &tl, &bl)) != 0) {
tl                804 lib/inflate.c      huft_free(tl);
tl                813 lib/inflate.c    if (inflate_codes(tl, td, bl, bd)) {
tl                820 lib/inflate.c    huft_free(tl);
tl                837 lib/inflate.c    struct huft *tl;      /* literal/length code table */
tl                901 lib/inflate.c    if ((i = huft_build(ll, 19, 19, NULL, NULL, &tl, &bl)) != 0)
tl                904 lib/inflate.c        huft_free(tl);
tl                918 lib/inflate.c      j = (td = tl + ((unsigned)b & m))->b;
tl                966 lib/inflate.c    huft_free(tl);
tl                978 lib/inflate.c    if ((i = huft_build(ll, nl, 257, cplens, cplext, &tl, &bl)) != 0)
tl                983 lib/inflate.c        huft_free(tl);
tl               1001 lib/inflate.c      huft_free(tl);
tl               1010 lib/inflate.c    if (inflate_codes(tl, td, bl, bd)) {
tl               1018 lib/inflate.c    huft_free(tl);
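
inflate_codes() decodes by masking the low bits of a bit buffer and indexing a Huffman table (tl for literals/lengths, td for distances), consuming only as many bits as the matched code's length; lib/inflate.c chains huft subtables when codes exceed the root table's bits. The standalone sketch below shows the flat-table version of that lookup for a three-symbol toy code; it is a simplification of, not a substitute for, the chained tables:

#include <stdint.h>
#include <stdio.h>

#define TBITS 2                         /* max code length in this toy code */

struct entry { char sym; uint8_t len; };

static uint32_t reverse_bits(uint32_t v, int n)
{
        uint32_t r = 0;

        while (n--) {
                r = (r << 1) | (v & 1);
                v >>= 1;
        }
        return r;
}

int main(void)
{
        /* toy canonical code: A=0 (1 bit), B=10, C=11 (2 bits each) */
        const struct { char sym; uint32_t code; uint8_t len; } codes[] = {
                { 'A', 0, 1 }, { 'B', 2, 2 }, { 'C', 3, 2 },
        };
        struct entry table[1 << TBITS];
        uint32_t buf;
        int bits, i, step;

        /* fill every slot whose low `len` bits match the reversed code */
        for (i = 0; i < 3; i++) {
                uint32_t rev = reverse_bits(codes[i].code, codes[i].len);

                for (step = rev; step < (1 << TBITS);
                     step += 1 << codes[i].len)
                        table[step] = (struct entry){ codes[i].sym,
                                                      codes[i].len };
        }

        /* encode "BAC": bits appended LSB-first, code bits reversed */
        buf = 0; bits = 0;
        buf |= reverse_bits(2, 2) << bits; bits += 2;   /* B */
        buf |= reverse_bits(0, 1) << bits; bits += 1;   /* A */
        buf |= reverse_bits(3, 2) << bits; bits += 2;   /* C */

        while (bits > 0) {
                struct entry e = table[buf & ((1 << TBITS) - 1)];

                putchar(e.sym);
                buf >>= e.len;          /* consume only e.len bits */
                bits -= e.len;
        }
        putchar('\n');                  /* prints BAC */
        return 0;
}
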
tl               1588 net/batman-adv/distributed-arp-table.c 	} *tl, _tl;
tl               1592 net/batman-adv/distributed-arp-table.c 	while ((tl = skb_header_pointer(skb, offset, sizeof(_tl), &_tl))) {
tl               1593 net/batman-adv/distributed-arp-table.c 		if (tl->type == BATADV_DHCP_OPT_MSG_TYPE)
tl               1596 net/batman-adv/distributed-arp-table.c 		if (tl->type == BATADV_DHCP_OPT_END)
tl               1599 net/batman-adv/distributed-arp-table.c 		if (tl->type == BATADV_DHCP_OPT_PAD)
tl               1602 net/batman-adv/distributed-arp-table.c 			offset += tl->len + sizeof(_tl);
tl               1606 net/batman-adv/distributed-arp-table.c 	if (!tl || tl->type != BATADV_DHCP_OPT_MSG_TYPE ||
tl               1607 net/batman-adv/distributed-arp-table.c 	    tl->len != sizeof(_type))
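
The batman-adv lines scan DHCP options through a two-byte {type, len} window, special-casing PAD (a lone byte with no length) and END (the terminator) while hunting for the message-type option. A standalone sketch over a flat buffer; option 53 and the sample payload follow the DHCP option format:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

enum { OPT_PAD = 0, OPT_MSG_TYPE = 53, OPT_END = 255 };

static const uint8_t *find_msg_type(const uint8_t *opts, size_t len)
{
        size_t off = 0;

        while (off + 2 <= len) {
                uint8_t type = opts[off];

                if (type == OPT_MSG_TYPE)
                        return &opts[off];
                if (type == OPT_END)
                        return NULL;
                if (type == OPT_PAD)
                        off += 1;       /* PAD carries no length byte */
                else
                        off += 2 + opts[off + 1];   /* header + payload */
        }
        return NULL;
}

int main(void)
{
        /* PAD, option 1 (len 4), message type (len 1, DHCPDISCOVER), END */
        const uint8_t opts[] = { 0, 1, 4, 1, 2, 3, 4, 53, 1, 1, 255 };
        const uint8_t *mt = find_msg_type(opts, sizeof(opts));

        if (mt && mt[1] == 1)
                printf("DHCP message type %u\n", mt[2]);
        return 0;
}
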
tl                732 security/apparmor/lsm.c 	struct aa_label *cl, *tl;
tl                740 security/apparmor/lsm.c 		tl = aa_get_task_label(target);
tl                741 security/apparmor/lsm.c 		error = aa_may_signal(cl, tl, sig);
tl                743 security/apparmor/lsm.c 		aa_put_label(tl);
tl                748 security/apparmor/lsm.c 	tl = aa_get_task_label(target);
tl                749 security/apparmor/lsm.c 	error = aa_may_signal(cl, tl, sig);
tl                750 security/apparmor/lsm.c 	aa_put_label(tl);
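
The apparmor lines show the label refcounting discipline around a permission check: take a counted reference on the target task's label, do the check, and drop the reference on every path. A sketch in apparmor's kernel context, using the crit-section helpers the real apparmor_task_kill() uses for the current label; the surrounding function is illustrative:

/* within security/apparmor/, using its internal label helpers */
#include <linux/sched.h>

static int my_signal_check(struct task_struct *target, int sig)
{
        struct aa_label *cl, *tl;
        int error;

        cl = __begin_current_label_crit_section();
        tl = aa_get_task_label(target);         /* +1 on target's label */
        error = aa_may_signal(cl, tl, sig);
        aa_put_label(tl);                       /* -1; may free the label */
        __end_current_label_crit_section(cl);

        return error;
}
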