tg 867 arch/arm/mach-pxa/eseries.c .tg = &e800_tg_info, tg 508 arch/arm/mach-pxa/hx4700.c .tg = &w3220_tg_info, tg 703 arch/powerpc/kernel/smp.c struct thread_groups *tg, tg 717 arch/powerpc/kernel/smp.c tg->property = thread_group_array[0]; tg 718 arch/powerpc/kernel/smp.c tg->nr_groups = thread_group_array[1]; tg 719 arch/powerpc/kernel/smp.c tg->threads_per_group = thread_group_array[2]; tg 720 arch/powerpc/kernel/smp.c if (tg->property != property || tg 721 arch/powerpc/kernel/smp.c tg->nr_groups < 1 || tg 722 arch/powerpc/kernel/smp.c tg->threads_per_group < 1) tg 725 arch/powerpc/kernel/smp.c total_threads = tg->nr_groups * tg->threads_per_group; tg 736 arch/powerpc/kernel/smp.c tg->thread_list[i] = thread_list[i]; tg 755 arch/powerpc/kernel/smp.c static int get_cpu_thread_group_start(int cpu, struct thread_groups *tg) tg 760 arch/powerpc/kernel/smp.c for (i = 0; i < tg->nr_groups; i++) { tg 761 arch/powerpc/kernel/smp.c int group_start = i * tg->threads_per_group; tg 763 arch/powerpc/kernel/smp.c for (j = 0; j < tg->threads_per_group; j++) { tg 766 arch/powerpc/kernel/smp.c if (tg->thread_list[idx] == hw_cpu_id) tg 778 arch/powerpc/kernel/smp.c struct thread_groups tg = {.property = 0, tg 787 arch/powerpc/kernel/smp.c err = parse_thread_groups(dn, &tg, THREAD_GROUP_SHARE_L1); tg 795 arch/powerpc/kernel/smp.c cpu_group_start = get_cpu_thread_group_start(cpu, &tg); tg 804 arch/powerpc/kernel/smp.c int i_group_start = get_cpu_thread_group_start(i, &tg); tg 70 block/blk-throttle.c struct throtl_grp *tg; /* tg this qnode belongs to */ tg 237 block/blk-throttle.c static inline struct blkcg_gq *tg_to_blkg(struct throtl_grp *tg) tg 239 block/blk-throttle.c return pd_to_blkg(&tg->pd); tg 266 block/blk-throttle.c struct throtl_grp *tg = sq_to_tg(sq); tg 268 block/blk-throttle.c if (tg) tg 269 block/blk-throttle.c return tg->td; tg 292 block/blk-throttle.c static uint64_t tg_bps_limit(struct throtl_grp *tg, int rw) tg 294 block/blk-throttle.c struct blkcg_gq *blkg = tg_to_blkg(tg); tg 301 block/blk-throttle.c td = tg->td; tg 302 block/blk-throttle.c ret = tg->bps[rw][td->limit_index]; tg 306 block/blk-throttle.c tg->iops[rw][td->limit_index]) tg 312 block/blk-throttle.c if (td->limit_index == LIMIT_MAX && tg->bps[rw][LIMIT_LOW] && tg 313 block/blk-throttle.c tg->bps[rw][LIMIT_LOW] != tg->bps[rw][LIMIT_MAX]) { tg 316 block/blk-throttle.c adjusted = throtl_adjusted_limit(tg->bps[rw][LIMIT_LOW], td); tg 317 block/blk-throttle.c ret = min(tg->bps[rw][LIMIT_MAX], adjusted); tg 322 block/blk-throttle.c static unsigned int tg_iops_limit(struct throtl_grp *tg, int rw) tg 324 block/blk-throttle.c struct blkcg_gq *blkg = tg_to_blkg(tg); tg 331 block/blk-throttle.c td = tg->td; tg 332 block/blk-throttle.c ret = tg->iops[rw][td->limit_index]; tg 333 block/blk-throttle.c if (ret == 0 && tg->td->limit_index == LIMIT_LOW) { tg 336 block/blk-throttle.c tg->bps[rw][td->limit_index]) tg 342 block/blk-throttle.c if (td->limit_index == LIMIT_MAX && tg->iops[rw][LIMIT_LOW] && tg 343 block/blk-throttle.c tg->iops[rw][LIMIT_LOW] != tg->iops[rw][LIMIT_MAX]) { tg 346 block/blk-throttle.c adjusted = throtl_adjusted_limit(tg->iops[rw][LIMIT_LOW], td); tg 349 block/blk-throttle.c ret = min_t(unsigned int, tg->iops[rw][LIMIT_MAX], adjusted); tg 389 block/blk-throttle.c static void throtl_qnode_init(struct throtl_qnode *qn, struct throtl_grp *tg) tg 393 block/blk-throttle.c qn->tg = tg; tg 412 block/blk-throttle.c blkg_get(tg_to_blkg(qn->tg)); tg 462 block/blk-throttle.c *tg_to_put = qn->tg; tg 464 block/blk-throttle.c 
blkg_put(tg_to_blkg(qn->tg)); tg 485 block/blk-throttle.c struct throtl_grp *tg; tg 488 block/blk-throttle.c tg = kzalloc_node(sizeof(*tg), gfp, q->node); tg 489 block/blk-throttle.c if (!tg) tg 492 block/blk-throttle.c throtl_service_queue_init(&tg->service_queue); tg 495 block/blk-throttle.c throtl_qnode_init(&tg->qnode_on_self[rw], tg); tg 496 block/blk-throttle.c throtl_qnode_init(&tg->qnode_on_parent[rw], tg); tg 499 block/blk-throttle.c RB_CLEAR_NODE(&tg->rb_node); tg 500 block/blk-throttle.c tg->bps[READ][LIMIT_MAX] = U64_MAX; tg 501 block/blk-throttle.c tg->bps[WRITE][LIMIT_MAX] = U64_MAX; tg 502 block/blk-throttle.c tg->iops[READ][LIMIT_MAX] = UINT_MAX; tg 503 block/blk-throttle.c tg->iops[WRITE][LIMIT_MAX] = UINT_MAX; tg 504 block/blk-throttle.c tg->bps_conf[READ][LIMIT_MAX] = U64_MAX; tg 505 block/blk-throttle.c tg->bps_conf[WRITE][LIMIT_MAX] = U64_MAX; tg 506 block/blk-throttle.c tg->iops_conf[READ][LIMIT_MAX] = UINT_MAX; tg 507 block/blk-throttle.c tg->iops_conf[WRITE][LIMIT_MAX] = UINT_MAX; tg 510 block/blk-throttle.c tg->latency_target = DFL_LATENCY_TARGET; tg 511 block/blk-throttle.c tg->latency_target_conf = DFL_LATENCY_TARGET; tg 512 block/blk-throttle.c tg->idletime_threshold = DFL_IDLE_THRESHOLD; tg 513 block/blk-throttle.c tg->idletime_threshold_conf = DFL_IDLE_THRESHOLD; tg 515 block/blk-throttle.c return &tg->pd; tg 520 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 521 block/blk-throttle.c struct blkcg_gq *blkg = tg_to_blkg(tg); tg 523 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 541 block/blk-throttle.c tg->td = td; tg 549 block/blk-throttle.c static void tg_update_has_rules(struct throtl_grp *tg) tg 551 block/blk-throttle.c struct throtl_grp *parent_tg = sq_to_tg(tg->service_queue.parent_sq); tg 552 block/blk-throttle.c struct throtl_data *td = tg->td; tg 556 block/blk-throttle.c tg->has_rules[rw] = (parent_tg && parent_tg->has_rules[rw]) || tg 558 block/blk-throttle.c (tg_bps_limit(tg, rw) != U64_MAX || tg 559 block/blk-throttle.c tg_iops_limit(tg, rw) != UINT_MAX)); tg 564 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 569 block/blk-throttle.c tg_update_has_rules(tg); tg 580 block/blk-throttle.c struct throtl_grp *tg = blkg_to_tg(blkg); tg 582 block/blk-throttle.c if (tg->bps[READ][LIMIT_LOW] || tg->bps[WRITE][LIMIT_LOW] || tg 583 block/blk-throttle.c tg->iops[READ][LIMIT_LOW] || tg->iops[WRITE][LIMIT_LOW]) { tg 596 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 598 block/blk-throttle.c tg->bps[READ][LIMIT_LOW] = 0; tg 599 block/blk-throttle.c tg->bps[WRITE][LIMIT_LOW] = 0; tg 600 block/blk-throttle.c tg->iops[READ][LIMIT_LOW] = 0; tg 601 block/blk-throttle.c tg->iops[WRITE][LIMIT_LOW] = 0; tg 603 block/blk-throttle.c blk_throtl_update_limit_valid(tg->td); tg 605 block/blk-throttle.c if (!tg->td->limit_valid[tg->td->limit_index]) tg 606 block/blk-throttle.c throtl_upgrade_state(tg->td); tg 611 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 613 block/blk-throttle.c del_timer_sync(&tg->service_queue.pending_timer); tg 614 block/blk-throttle.c kfree(tg); tg 642 block/blk-throttle.c struct throtl_grp *tg; tg 644 block/blk-throttle.c tg = throtl_rb_first(parent_sq); tg 645 block/blk-throttle.c if (!tg) tg 648 block/blk-throttle.c parent_sq->first_pending_disptime = tg->disptime; tg 651 block/blk-throttle.c static void tg_service_queue_add(struct throtl_grp *tg) tg 653 block/blk-throttle.c struct throtl_service_queue *parent_sq = tg->service_queue.parent_sq; tg 657 
block/blk-throttle.c unsigned long key = tg->disptime; tg 672 block/blk-throttle.c rb_link_node(&tg->rb_node, parent, node); tg 673 block/blk-throttle.c rb_insert_color_cached(&tg->rb_node, &parent_sq->pending_tree, tg 677 block/blk-throttle.c static void __throtl_enqueue_tg(struct throtl_grp *tg) tg 679 block/blk-throttle.c tg_service_queue_add(tg); tg 680 block/blk-throttle.c tg->flags |= THROTL_TG_PENDING; tg 681 block/blk-throttle.c tg->service_queue.parent_sq->nr_pending++; tg 684 block/blk-throttle.c static void throtl_enqueue_tg(struct throtl_grp *tg) tg 686 block/blk-throttle.c if (!(tg->flags & THROTL_TG_PENDING)) tg 687 block/blk-throttle.c __throtl_enqueue_tg(tg); tg 690 block/blk-throttle.c static void __throtl_dequeue_tg(struct throtl_grp *tg) tg 692 block/blk-throttle.c throtl_rb_erase(&tg->rb_node, tg->service_queue.parent_sq); tg 693 block/blk-throttle.c tg->flags &= ~THROTL_TG_PENDING; tg 696 block/blk-throttle.c static void throtl_dequeue_tg(struct throtl_grp *tg) tg 698 block/blk-throttle.c if (tg->flags & THROTL_TG_PENDING) tg 699 block/blk-throttle.c __throtl_dequeue_tg(tg); tg 759 block/blk-throttle.c static inline void throtl_start_new_slice_with_credit(struct throtl_grp *tg, tg 762 block/blk-throttle.c tg->bytes_disp[rw] = 0; tg 763 block/blk-throttle.c tg->io_disp[rw] = 0; tg 771 block/blk-throttle.c if (time_after_eq(start, tg->slice_start[rw])) tg 772 block/blk-throttle.c tg->slice_start[rw] = start; tg 774 block/blk-throttle.c tg->slice_end[rw] = jiffies + tg->td->throtl_slice; tg 775 block/blk-throttle.c throtl_log(&tg->service_queue, tg 777 block/blk-throttle.c rw == READ ? 'R' : 'W', tg->slice_start[rw], tg 778 block/blk-throttle.c tg->slice_end[rw], jiffies); tg 781 block/blk-throttle.c static inline void throtl_start_new_slice(struct throtl_grp *tg, bool rw) tg 783 block/blk-throttle.c tg->bytes_disp[rw] = 0; tg 784 block/blk-throttle.c tg->io_disp[rw] = 0; tg 785 block/blk-throttle.c tg->slice_start[rw] = jiffies; tg 786 block/blk-throttle.c tg->slice_end[rw] = jiffies + tg->td->throtl_slice; tg 787 block/blk-throttle.c throtl_log(&tg->service_queue, tg 789 block/blk-throttle.c rw == READ ? 'R' : 'W', tg->slice_start[rw], tg 790 block/blk-throttle.c tg->slice_end[rw], jiffies); tg 793 block/blk-throttle.c static inline void throtl_set_slice_end(struct throtl_grp *tg, bool rw, tg 796 block/blk-throttle.c tg->slice_end[rw] = roundup(jiffy_end, tg->td->throtl_slice); tg 799 block/blk-throttle.c static inline void throtl_extend_slice(struct throtl_grp *tg, bool rw, tg 802 block/blk-throttle.c tg->slice_end[rw] = roundup(jiffy_end, tg->td->throtl_slice); tg 803 block/blk-throttle.c throtl_log(&tg->service_queue, tg 805 block/blk-throttle.c rw == READ ? 
'R' : 'W', tg->slice_start[rw], tg 806 block/blk-throttle.c tg->slice_end[rw], jiffies); tg 810 block/blk-throttle.c static bool throtl_slice_used(struct throtl_grp *tg, bool rw) tg 812 block/blk-throttle.c if (time_in_range(jiffies, tg->slice_start[rw], tg->slice_end[rw])) tg 819 block/blk-throttle.c static inline void throtl_trim_slice(struct throtl_grp *tg, bool rw) tg 824 block/blk-throttle.c BUG_ON(time_before(tg->slice_end[rw], tg->slice_start[rw])); tg 831 block/blk-throttle.c if (throtl_slice_used(tg, rw)) tg 842 block/blk-throttle.c throtl_set_slice_end(tg, rw, jiffies + tg->td->throtl_slice); tg 844 block/blk-throttle.c time_elapsed = jiffies - tg->slice_start[rw]; tg 846 block/blk-throttle.c nr_slices = time_elapsed / tg->td->throtl_slice; tg 850 block/blk-throttle.c tmp = tg_bps_limit(tg, rw) * tg->td->throtl_slice * nr_slices; tg 854 block/blk-throttle.c io_trim = (tg_iops_limit(tg, rw) * tg->td->throtl_slice * nr_slices) / tg 860 block/blk-throttle.c if (tg->bytes_disp[rw] >= bytes_trim) tg 861 block/blk-throttle.c tg->bytes_disp[rw] -= bytes_trim; tg 863 block/blk-throttle.c tg->bytes_disp[rw] = 0; tg 865 block/blk-throttle.c if (tg->io_disp[rw] >= io_trim) tg 866 block/blk-throttle.c tg->io_disp[rw] -= io_trim; tg 868 block/blk-throttle.c tg->io_disp[rw] = 0; tg 870 block/blk-throttle.c tg->slice_start[rw] += nr_slices * tg->td->throtl_slice; tg 872 block/blk-throttle.c throtl_log(&tg->service_queue, tg 875 block/blk-throttle.c tg->slice_start[rw], tg->slice_end[rw], jiffies); tg 878 block/blk-throttle.c static bool tg_with_in_iops_limit(struct throtl_grp *tg, struct bio *bio, tg 886 block/blk-throttle.c jiffy_elapsed = jiffies - tg->slice_start[rw]; tg 889 block/blk-throttle.c jiffy_elapsed_rnd = roundup(jiffy_elapsed + 1, tg->td->throtl_slice); tg 898 block/blk-throttle.c tmp = (u64)tg_iops_limit(tg, rw) * jiffy_elapsed_rnd; tg 906 block/blk-throttle.c if (tg->io_disp[rw] + 1 <= io_allowed) { tg 920 block/blk-throttle.c static bool tg_with_in_bps_limit(struct throtl_grp *tg, struct bio *bio, tg 928 block/blk-throttle.c jiffy_elapsed = jiffy_elapsed_rnd = jiffies - tg->slice_start[rw]; tg 932 block/blk-throttle.c jiffy_elapsed_rnd = tg->td->throtl_slice; tg 934 block/blk-throttle.c jiffy_elapsed_rnd = roundup(jiffy_elapsed_rnd, tg->td->throtl_slice); tg 936 block/blk-throttle.c tmp = tg_bps_limit(tg, rw) * jiffy_elapsed_rnd; tg 940 block/blk-throttle.c if (tg->bytes_disp[rw] + bio_size <= bytes_allowed) { tg 947 block/blk-throttle.c extra_bytes = tg->bytes_disp[rw] + bio_size - bytes_allowed; tg 948 block/blk-throttle.c jiffy_wait = div64_u64(extra_bytes * HZ, tg_bps_limit(tg, rw)); tg 967 block/blk-throttle.c static bool tg_may_dispatch(struct throtl_grp *tg, struct bio *bio, tg 979 block/blk-throttle.c BUG_ON(tg->service_queue.nr_queued[rw] && tg 980 block/blk-throttle.c bio != throtl_peek_queued(&tg->service_queue.queued[rw])); tg 983 block/blk-throttle.c if (tg_bps_limit(tg, rw) == U64_MAX && tg 984 block/blk-throttle.c tg_iops_limit(tg, rw) == UINT_MAX) { tg 997 block/blk-throttle.c if (throtl_slice_used(tg, rw) && !(tg->service_queue.nr_queued[rw])) tg 998 block/blk-throttle.c throtl_start_new_slice(tg, rw); tg 1000 block/blk-throttle.c if (time_before(tg->slice_end[rw], tg 1001 block/blk-throttle.c jiffies + tg->td->throtl_slice)) tg 1002 block/blk-throttle.c throtl_extend_slice(tg, rw, tg 1003 block/blk-throttle.c jiffies + tg->td->throtl_slice); tg 1006 block/blk-throttle.c if (tg_with_in_bps_limit(tg, bio, &bps_wait) && tg 1007 block/blk-throttle.c 
tg_with_in_iops_limit(tg, bio, &iops_wait)) { tg 1018 block/blk-throttle.c if (time_before(tg->slice_end[rw], jiffies + max_wait)) tg 1019 block/blk-throttle.c throtl_extend_slice(tg, rw, jiffies + max_wait); tg 1024 block/blk-throttle.c static void throtl_charge_bio(struct throtl_grp *tg, struct bio *bio) tg 1030 block/blk-throttle.c tg->bytes_disp[rw] += bio_size; tg 1031 block/blk-throttle.c tg->io_disp[rw]++; tg 1032 block/blk-throttle.c tg->last_bytes_disp[rw] += bio_size; tg 1033 block/blk-throttle.c tg->last_io_disp[rw]++; tg 1055 block/blk-throttle.c struct throtl_grp *tg) tg 1057 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 1061 block/blk-throttle.c qn = &tg->qnode_on_self[rw]; tg 1070 block/blk-throttle.c tg->flags |= THROTL_TG_WAS_EMPTY; tg 1075 block/blk-throttle.c throtl_enqueue_tg(tg); tg 1078 block/blk-throttle.c static void tg_update_disptime(struct throtl_grp *tg) tg 1080 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 1086 block/blk-throttle.c tg_may_dispatch(tg, bio, &read_wait); tg 1090 block/blk-throttle.c tg_may_dispatch(tg, bio, &write_wait); tg 1096 block/blk-throttle.c throtl_dequeue_tg(tg); tg 1097 block/blk-throttle.c tg->disptime = disptime; tg 1098 block/blk-throttle.c throtl_enqueue_tg(tg); tg 1101 block/blk-throttle.c tg->flags &= ~THROTL_TG_WAS_EMPTY; tg 1114 block/blk-throttle.c static void tg_dispatch_one_bio(struct throtl_grp *tg, bool rw) tg 1116 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 1131 block/blk-throttle.c throtl_charge_bio(tg, bio); tg 1141 block/blk-throttle.c throtl_add_bio_tg(bio, &tg->qnode_on_parent[rw], parent_tg); tg 1142 block/blk-throttle.c start_parent_slice_with_credit(tg, parent_tg, rw); tg 1144 block/blk-throttle.c throtl_qnode_add_bio(bio, &tg->qnode_on_parent[rw], tg 1146 block/blk-throttle.c BUG_ON(tg->td->nr_queued[rw] <= 0); tg 1147 block/blk-throttle.c tg->td->nr_queued[rw]--; tg 1150 block/blk-throttle.c throtl_trim_slice(tg, rw); tg 1156 block/blk-throttle.c static int throtl_dispatch_tg(struct throtl_grp *tg) tg 1158 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 1167 block/blk-throttle.c tg_may_dispatch(tg, bio, NULL)) { tg 1169 block/blk-throttle.c tg_dispatch_one_bio(tg, bio_data_dir(bio)); tg 1177 block/blk-throttle.c tg_may_dispatch(tg, bio, NULL)) { tg 1179 block/blk-throttle.c tg_dispatch_one_bio(tg, bio_data_dir(bio)); tg 1194 block/blk-throttle.c struct throtl_grp *tg = throtl_rb_first(parent_sq); tg 1197 block/blk-throttle.c if (!tg) tg 1200 block/blk-throttle.c if (time_before(jiffies, tg->disptime)) tg 1203 block/blk-throttle.c throtl_dequeue_tg(tg); tg 1205 block/blk-throttle.c nr_disp += throtl_dispatch_tg(tg); tg 1207 block/blk-throttle.c sq = &tg->service_queue; tg 1209 block/blk-throttle.c tg_update_disptime(tg); tg 1238 block/blk-throttle.c struct throtl_grp *tg = sq_to_tg(sq); tg 1278 block/blk-throttle.c if (tg->flags & THROTL_TG_WAS_EMPTY) { tg 1279 block/blk-throttle.c tg_update_disptime(tg); tg 1283 block/blk-throttle.c tg = sq_to_tg(sq); tg 1333 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 1334 block/blk-throttle.c u64 v = *(u64 *)((void *)tg + off); tg 1344 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 1345 block/blk-throttle.c unsigned int v = *(unsigned int *)((void *)tg + off); tg 1366 block/blk-throttle.c static void tg_conf_updated(struct throtl_grp *tg, bool global) tg 1368 block/blk-throttle.c struct throtl_service_queue *sq = 
&tg->service_queue; tg 1372 block/blk-throttle.c throtl_log(&tg->service_queue, tg 1374 block/blk-throttle.c tg_bps_limit(tg, READ), tg_bps_limit(tg, WRITE), tg 1375 block/blk-throttle.c tg_iops_limit(tg, READ), tg_iops_limit(tg, WRITE)); tg 1385 block/blk-throttle.c global ? tg->td->queue->root_blkg : tg_to_blkg(tg)) { tg 1413 block/blk-throttle.c throtl_start_new_slice(tg, 0); tg 1414 block/blk-throttle.c throtl_start_new_slice(tg, 1); tg 1416 block/blk-throttle.c if (tg->flags & THROTL_TG_PENDING) { tg 1417 block/blk-throttle.c tg_update_disptime(tg); tg 1427 block/blk-throttle.c struct throtl_grp *tg; tg 1441 block/blk-throttle.c tg = blkg_to_tg(ctx.blkg); tg 1444 block/blk-throttle.c *(u64 *)((void *)tg + of_cft(of)->private) = v; tg 1446 block/blk-throttle.c *(unsigned int *)((void *)tg + of_cft(of)->private) = v; tg 1448 block/blk-throttle.c tg_conf_updated(tg, false); tg 1518 block/blk-throttle.c struct throtl_grp *tg = pd_to_tg(pd); tg 1537 block/blk-throttle.c if (tg->bps_conf[READ][off] == bps_dft && tg 1538 block/blk-throttle.c tg->bps_conf[WRITE][off] == bps_dft && tg 1539 block/blk-throttle.c tg->iops_conf[READ][off] == iops_dft && tg 1540 block/blk-throttle.c tg->iops_conf[WRITE][off] == iops_dft && tg 1542 block/blk-throttle.c (tg->idletime_threshold_conf == DFL_IDLE_THRESHOLD && tg 1543 block/blk-throttle.c tg->latency_target_conf == DFL_LATENCY_TARGET))) tg 1546 block/blk-throttle.c if (tg->bps_conf[READ][off] != U64_MAX) tg 1548 block/blk-throttle.c tg->bps_conf[READ][off]); tg 1549 block/blk-throttle.c if (tg->bps_conf[WRITE][off] != U64_MAX) tg 1551 block/blk-throttle.c tg->bps_conf[WRITE][off]); tg 1552 block/blk-throttle.c if (tg->iops_conf[READ][off] != UINT_MAX) tg 1554 block/blk-throttle.c tg->iops_conf[READ][off]); tg 1555 block/blk-throttle.c if (tg->iops_conf[WRITE][off] != UINT_MAX) tg 1557 block/blk-throttle.c tg->iops_conf[WRITE][off]); tg 1559 block/blk-throttle.c if (tg->idletime_threshold_conf == ULONG_MAX) tg 1563 block/blk-throttle.c tg->idletime_threshold_conf); tg 1565 block/blk-throttle.c if (tg->latency_target_conf == ULONG_MAX) tg 1569 block/blk-throttle.c " latency=%lu", tg->latency_target_conf); tg 1590 block/blk-throttle.c struct throtl_grp *tg; tg 1601 block/blk-throttle.c tg = blkg_to_tg(ctx.blkg); tg 1603 block/blk-throttle.c v[0] = tg->bps_conf[READ][index]; tg 1604 block/blk-throttle.c v[1] = tg->bps_conf[WRITE][index]; tg 1605 block/blk-throttle.c v[2] = tg->iops_conf[READ][index]; tg 1606 block/blk-throttle.c v[3] = tg->iops_conf[WRITE][index]; tg 1608 block/blk-throttle.c idle_time = tg->idletime_threshold_conf; tg 1609 block/blk-throttle.c latency_time = tg->latency_target_conf; tg 1649 block/blk-throttle.c tg->bps_conf[READ][index] = v[0]; tg 1650 block/blk-throttle.c tg->bps_conf[WRITE][index] = v[1]; tg 1651 block/blk-throttle.c tg->iops_conf[READ][index] = v[2]; tg 1652 block/blk-throttle.c tg->iops_conf[WRITE][index] = v[3]; tg 1655 block/blk-throttle.c tg->bps[READ][index] = v[0]; tg 1656 block/blk-throttle.c tg->bps[WRITE][index] = v[1]; tg 1657 block/blk-throttle.c tg->iops[READ][index] = v[2]; tg 1658 block/blk-throttle.c tg->iops[WRITE][index] = v[3]; tg 1660 block/blk-throttle.c tg->bps[READ][LIMIT_LOW] = min(tg->bps_conf[READ][LIMIT_LOW], tg 1661 block/blk-throttle.c tg->bps_conf[READ][LIMIT_MAX]); tg 1662 block/blk-throttle.c tg->bps[WRITE][LIMIT_LOW] = min(tg->bps_conf[WRITE][LIMIT_LOW], tg 1663 block/blk-throttle.c tg->bps_conf[WRITE][LIMIT_MAX]); tg 1664 block/blk-throttle.c tg->iops[READ][LIMIT_LOW] = 
min(tg->iops_conf[READ][LIMIT_LOW], tg 1665 block/blk-throttle.c tg->iops_conf[READ][LIMIT_MAX]); tg 1666 block/blk-throttle.c tg->iops[WRITE][LIMIT_LOW] = min(tg->iops_conf[WRITE][LIMIT_LOW], tg 1667 block/blk-throttle.c tg->iops_conf[WRITE][LIMIT_MAX]); tg 1668 block/blk-throttle.c tg->idletime_threshold_conf = idle_time; tg 1669 block/blk-throttle.c tg->latency_target_conf = latency_time; tg 1672 block/blk-throttle.c if (!(tg->bps[READ][LIMIT_LOW] || tg->iops[READ][LIMIT_LOW] || tg 1673 block/blk-throttle.c tg->bps[WRITE][LIMIT_LOW] || tg->iops[WRITE][LIMIT_LOW]) || tg 1674 block/blk-throttle.c tg->idletime_threshold_conf == DFL_IDLE_THRESHOLD || tg 1675 block/blk-throttle.c tg->latency_target_conf == DFL_LATENCY_TARGET) { tg 1676 block/blk-throttle.c tg->bps[READ][LIMIT_LOW] = 0; tg 1677 block/blk-throttle.c tg->bps[WRITE][LIMIT_LOW] = 0; tg 1678 block/blk-throttle.c tg->iops[READ][LIMIT_LOW] = 0; tg 1679 block/blk-throttle.c tg->iops[WRITE][LIMIT_LOW] = 0; tg 1680 block/blk-throttle.c tg->idletime_threshold = DFL_IDLE_THRESHOLD; tg 1681 block/blk-throttle.c tg->latency_target = DFL_LATENCY_TARGET; tg 1683 block/blk-throttle.c tg->idletime_threshold = tg->idletime_threshold_conf; tg 1684 block/blk-throttle.c tg->latency_target = tg->latency_target_conf; tg 1687 block/blk-throttle.c blk_throtl_update_limit_valid(tg->td); tg 1688 block/blk-throttle.c if (tg->td->limit_valid[LIMIT_LOW]) { tg 1690 block/blk-throttle.c tg->td->limit_index = LIMIT_LOW; tg 1692 block/blk-throttle.c tg->td->limit_index = LIMIT_MAX; tg 1693 block/blk-throttle.c tg_conf_updated(tg, index == LIMIT_LOW && tg 1694 block/blk-throttle.c tg->td->limit_valid[LIMIT_LOW]); tg 1739 block/blk-throttle.c static unsigned long __tg_last_low_overflow_time(struct throtl_grp *tg) tg 1743 block/blk-throttle.c if (tg->bps[READ][LIMIT_LOW] || tg->iops[READ][LIMIT_LOW]) tg 1744 block/blk-throttle.c rtime = tg->last_low_overflow_time[READ]; tg 1745 block/blk-throttle.c if (tg->bps[WRITE][LIMIT_LOW] || tg->iops[WRITE][LIMIT_LOW]) tg 1746 block/blk-throttle.c wtime = tg->last_low_overflow_time[WRITE]; tg 1751 block/blk-throttle.c static unsigned long tg_last_low_overflow_time(struct throtl_grp *tg) tg 1754 block/blk-throttle.c struct throtl_grp *parent = tg; tg 1755 block/blk-throttle.c unsigned long ret = __tg_last_low_overflow_time(tg); tg 1778 block/blk-throttle.c static bool throtl_tg_is_idle(struct throtl_grp *tg) tg 1790 block/blk-throttle.c time = min_t(unsigned long, MAX_IDLE_TIME, 4 * tg->idletime_threshold); tg 1791 block/blk-throttle.c ret = tg->latency_target == DFL_LATENCY_TARGET || tg 1792 block/blk-throttle.c tg->idletime_threshold == DFL_IDLE_THRESHOLD || tg 1793 block/blk-throttle.c (ktime_get_ns() >> 10) - tg->last_finish_time > time || tg 1794 block/blk-throttle.c tg->avg_idletime > tg->idletime_threshold || tg 1795 block/blk-throttle.c (tg->latency_target && tg->bio_cnt && tg 1796 block/blk-throttle.c tg->bad_bio_cnt * 5 < tg->bio_cnt); tg 1797 block/blk-throttle.c throtl_log(&tg->service_queue, tg 1799 block/blk-throttle.c tg->avg_idletime, tg->idletime_threshold, tg->bad_bio_cnt, tg 1800 block/blk-throttle.c tg->bio_cnt, ret, tg->td->scale); tg 1804 block/blk-throttle.c static bool throtl_tg_can_upgrade(struct throtl_grp *tg) tg 1806 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 1813 block/blk-throttle.c read_limit = tg->bps[READ][LIMIT_LOW] || tg->iops[READ][LIMIT_LOW]; tg 1814 block/blk-throttle.c write_limit = tg->bps[WRITE][LIMIT_LOW] || tg->iops[WRITE][LIMIT_LOW]; tg 1825 
block/blk-throttle.c tg_last_low_overflow_time(tg) + tg->td->throtl_slice) && tg 1826 block/blk-throttle.c throtl_tg_is_idle(tg)) tg 1831 block/blk-throttle.c static bool throtl_hierarchy_can_upgrade(struct throtl_grp *tg) tg 1834 block/blk-throttle.c if (throtl_tg_can_upgrade(tg)) tg 1836 block/blk-throttle.c tg = sq_to_tg(tg->service_queue.parent_sq); tg 1837 block/blk-throttle.c if (!tg || !tg_to_blkg(tg)->parent) tg 1857 block/blk-throttle.c struct throtl_grp *tg = blkg_to_tg(blkg); tg 1859 block/blk-throttle.c if (tg == this_tg) tg 1861 block/blk-throttle.c if (!list_empty(&tg_to_blkg(tg)->blkcg->css.children)) tg 1863 block/blk-throttle.c if (!throtl_hierarchy_can_upgrade(tg)) { tg 1872 block/blk-throttle.c static void throtl_upgrade_check(struct throtl_grp *tg) tg 1876 block/blk-throttle.c if (tg->td->limit_index != LIMIT_LOW) tg 1879 block/blk-throttle.c if (time_after(tg->last_check_time + tg->td->throtl_slice, now)) tg 1882 block/blk-throttle.c tg->last_check_time = now; tg 1885 block/blk-throttle.c __tg_last_low_overflow_time(tg) + tg->td->throtl_slice)) tg 1888 block/blk-throttle.c if (throtl_can_upgrade(tg->td, NULL)) tg 1889 block/blk-throttle.c throtl_upgrade_state(tg->td); tg 1903 block/blk-throttle.c struct throtl_grp *tg = blkg_to_tg(blkg); tg 1904 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 1906 block/blk-throttle.c tg->disptime = jiffies - 1; tg 1930 block/blk-throttle.c static bool throtl_tg_can_downgrade(struct throtl_grp *tg) tg 1932 block/blk-throttle.c struct throtl_data *td = tg->td; tg 1940 block/blk-throttle.c time_after_eq(now, tg_last_low_overflow_time(tg) + tg 1942 block/blk-throttle.c (!throtl_tg_is_idle(tg) || tg 1943 block/blk-throttle.c !list_empty(&tg_to_blkg(tg)->blkcg->css.children))) tg 1948 block/blk-throttle.c static bool throtl_hierarchy_can_downgrade(struct throtl_grp *tg) tg 1951 block/blk-throttle.c if (!throtl_tg_can_downgrade(tg)) tg 1953 block/blk-throttle.c tg = sq_to_tg(tg->service_queue.parent_sq); tg 1954 block/blk-throttle.c if (!tg || !tg_to_blkg(tg)->parent) tg 1960 block/blk-throttle.c static void throtl_downgrade_check(struct throtl_grp *tg) tg 1967 block/blk-throttle.c if (tg->td->limit_index != LIMIT_MAX || tg 1968 block/blk-throttle.c !tg->td->limit_valid[LIMIT_LOW]) tg 1970 block/blk-throttle.c if (!list_empty(&tg_to_blkg(tg)->blkcg->css.children)) tg 1972 block/blk-throttle.c if (time_after(tg->last_check_time + tg->td->throtl_slice, now)) tg 1975 block/blk-throttle.c elapsed_time = now - tg->last_check_time; tg 1976 block/blk-throttle.c tg->last_check_time = now; tg 1978 block/blk-throttle.c if (time_before(now, tg_last_low_overflow_time(tg) + tg 1979 block/blk-throttle.c tg->td->throtl_slice)) tg 1982 block/blk-throttle.c if (tg->bps[READ][LIMIT_LOW]) { tg 1983 block/blk-throttle.c bps = tg->last_bytes_disp[READ] * HZ; tg 1985 block/blk-throttle.c if (bps >= tg->bps[READ][LIMIT_LOW]) tg 1986 block/blk-throttle.c tg->last_low_overflow_time[READ] = now; tg 1989 block/blk-throttle.c if (tg->bps[WRITE][LIMIT_LOW]) { tg 1990 block/blk-throttle.c bps = tg->last_bytes_disp[WRITE] * HZ; tg 1992 block/blk-throttle.c if (bps >= tg->bps[WRITE][LIMIT_LOW]) tg 1993 block/blk-throttle.c tg->last_low_overflow_time[WRITE] = now; tg 1996 block/blk-throttle.c if (tg->iops[READ][LIMIT_LOW]) { tg 1997 block/blk-throttle.c iops = tg->last_io_disp[READ] * HZ / elapsed_time; tg 1998 block/blk-throttle.c if (iops >= tg->iops[READ][LIMIT_LOW]) tg 1999 block/blk-throttle.c tg->last_low_overflow_time[READ] = now; tg 
2002 block/blk-throttle.c if (tg->iops[WRITE][LIMIT_LOW]) { tg 2003 block/blk-throttle.c iops = tg->last_io_disp[WRITE] * HZ / elapsed_time; tg 2004 block/blk-throttle.c if (iops >= tg->iops[WRITE][LIMIT_LOW]) tg 2005 block/blk-throttle.c tg->last_low_overflow_time[WRITE] = now; tg 2012 block/blk-throttle.c if (throtl_hierarchy_can_downgrade(tg)) tg 2013 block/blk-throttle.c throtl_downgrade_state(tg->td, LIMIT_LOW); tg 2015 block/blk-throttle.c tg->last_bytes_disp[READ] = 0; tg 2016 block/blk-throttle.c tg->last_bytes_disp[WRITE] = 0; tg 2017 block/blk-throttle.c tg->last_io_disp[READ] = 0; tg 2018 block/blk-throttle.c tg->last_io_disp[WRITE] = 0; tg 2021 block/blk-throttle.c static void blk_throtl_update_idletime(struct throtl_grp *tg) tg 2024 block/blk-throttle.c unsigned long last_finish_time = tg->last_finish_time; tg 2027 block/blk-throttle.c last_finish_time == tg->checked_last_finish_time) tg 2030 block/blk-throttle.c tg->avg_idletime = (tg->avg_idletime * 7 + now - last_finish_time) >> 3; tg 2031 block/blk-throttle.c tg->checked_last_finish_time = last_finish_time; tg 2121 block/blk-throttle.c struct throtl_grp *tg = blkg_to_tg(blkg ?: q->root_blkg); tg 2125 block/blk-throttle.c struct throtl_data *td = tg->td; tg 2130 block/blk-throttle.c if (bio_flagged(bio, BIO_THROTTLED) || !tg->has_rules[rw]) tg 2137 block/blk-throttle.c blk_throtl_update_idletime(tg); tg 2139 block/blk-throttle.c sq = &tg->service_queue; tg 2143 block/blk-throttle.c if (tg->last_low_overflow_time[rw] == 0) tg 2144 block/blk-throttle.c tg->last_low_overflow_time[rw] = jiffies; tg 2145 block/blk-throttle.c throtl_downgrade_check(tg); tg 2146 block/blk-throttle.c throtl_upgrade_check(tg); tg 2152 block/blk-throttle.c if (!tg_may_dispatch(tg, bio, NULL)) { tg 2153 block/blk-throttle.c tg->last_low_overflow_time[rw] = jiffies; tg 2154 block/blk-throttle.c if (throtl_can_upgrade(td, tg)) { tg 2162 block/blk-throttle.c throtl_charge_bio(tg, bio); tg 2175 block/blk-throttle.c throtl_trim_slice(tg, rw); tg 2182 block/blk-throttle.c qn = &tg->qnode_on_parent[rw]; tg 2184 block/blk-throttle.c tg = sq_to_tg(sq); tg 2185 block/blk-throttle.c if (!tg) tg 2192 block/blk-throttle.c tg->bytes_disp[rw], bio->bi_iter.bi_size, tg 2193 block/blk-throttle.c tg_bps_limit(tg, rw), tg 2194 block/blk-throttle.c tg->io_disp[rw], tg_iops_limit(tg, rw), tg 2197 block/blk-throttle.c tg->last_low_overflow_time[rw] = jiffies; tg 2200 block/blk-throttle.c throtl_add_bio_tg(bio, qn, tg); tg 2209 block/blk-throttle.c if (tg->flags & THROTL_TG_WAS_EMPTY) { tg 2210 block/blk-throttle.c tg_update_disptime(tg); tg 2211 block/blk-throttle.c throtl_schedule_next_dispatch(tg->service_queue.parent_sq, true); tg 2258 block/blk-throttle.c struct throtl_grp *tg; tg 2268 block/blk-throttle.c tg = blkg_to_tg(blkg); tg 2271 block/blk-throttle.c tg->last_finish_time = finish_time_ns >> 10; tg 2281 block/blk-throttle.c throtl_track_latency(tg->td, bio_issue_size(&bio->bi_issue), tg 2284 block/blk-throttle.c if (tg->latency_target && lat >= tg->td->filtered_latency) { tg 2289 block/blk-throttle.c threshold = tg->td->avg_buckets[rw][bucket].latency + tg 2290 block/blk-throttle.c tg->latency_target; tg 2292 block/blk-throttle.c tg->bad_bio_cnt++; tg 2297 block/blk-throttle.c tg->bio_cnt++; tg 2300 block/blk-throttle.c if (time_after(jiffies, tg->bio_cnt_reset_time) || tg->bio_cnt > 1024) { tg 2301 block/blk-throttle.c tg->bio_cnt_reset_time = tg->td->throtl_slice + jiffies; tg 2302 block/blk-throttle.c tg->bio_cnt /= 2; tg 2303 block/blk-throttle.c 
tg->bad_bio_cnt /= 2; tg 2315 block/blk-throttle.c struct throtl_grp *tg; tg 2317 block/blk-throttle.c while ((tg = throtl_rb_first(parent_sq))) { tg 2318 block/blk-throttle.c struct throtl_service_queue *sq = &tg->service_queue; tg 2321 block/blk-throttle.c throtl_dequeue_tg(tg); tg 2324 block/blk-throttle.c tg_dispatch_one_bio(tg, bio_data_dir(bio)); tg 2326 block/blk-throttle.c tg_dispatch_one_bio(tg, bio_data_dir(bio)); tg 26 drivers/firmware/efi/libstub/arm64-stub.c u64 tg; tg 32 drivers/firmware/efi/libstub/arm64-stub.c tg = (read_cpuid(ID_AA64MMFR0_EL1) >> ID_AA64MMFR0_TGRAN_SHIFT) & 0xf; tg 33 drivers/firmware/efi/libstub/arm64-stub.c if (tg != ID_AA64MMFR0_TGRAN_SUPPORTED) { tg 148 drivers/gpu/drm/amd/display/dc/clk_mgr/dce110/dce110_clk_mgr.c cfg->pipe_idx = pipe_ctx->stream_res.tg->inst; tg 291 drivers/gpu/drm/amd/display/dc/core/dc.c if (pipe->stream == stream && pipe->stream_res.tg) { tg 348 drivers/gpu/drm/amd/display/dc/core/dc.c struct timing_generator *tg; tg 374 drivers/gpu/drm/amd/display/dc/core/dc.c tg = pipe->stream_res.tg; tg 377 drivers/gpu/drm/amd/display/dc/core/dc.c if (tg->funcs->configure_crc) tg 378 drivers/gpu/drm/amd/display/dc/core/dc.c return tg->funcs->configure_crc(tg, ¶m); tg 397 drivers/gpu/drm/amd/display/dc/core/dc.c struct timing_generator *tg; tg 408 drivers/gpu/drm/amd/display/dc/core/dc.c tg = pipe->stream_res.tg; tg 410 drivers/gpu/drm/amd/display/dc/core/dc.c if (tg->funcs->get_crc) tg 411 drivers/gpu/drm/amd/display/dc/core/dc.c return tg->funcs->get_crc(tg, r_cr, g_y, b_cb); tg 974 drivers/gpu/drm/amd/display/dc/core/dc.c struct timing_generator *tg; tg 1003 drivers/gpu/drm/amd/display/dc/core/dc.c tg = dc->res_pool->timing_generators[tg_inst]; tg 1005 drivers/gpu/drm/amd/display/dc/core/dc.c if (!tg->funcs->is_matching_timing) tg 1008 drivers/gpu/drm/amd/display/dc/core/dc.c if (!tg->funcs->is_matching_timing(tg, crtc_timing)) tg 1980 drivers/gpu/drm/amd/display/dc/core/dc.c if (pipe_ctx->stream_res.tg->funcs->is_blanked) { tg 1982 drivers/gpu/drm/amd/display/dc/core/dc.c if (!pipe_ctx->stream_res.tg->funcs->is_blanked(pipe_ctx->stream_res.tg)) tg 2164 drivers/gpu/drm/amd/display/dc/core/dc.c if (pipe_ctx->stream_res.tg->funcs->program_manual_trigger) tg 2165 drivers/gpu/drm/amd/display/dc/core/dc.c pipe_ctx->stream_res.tg->funcs->program_manual_trigger(pipe_ctx->stream_res.tg); tg 328 drivers/gpu/drm/amd/display/dc/core/dc_debug.c pipe_ctx->stream_res.tg->funcs->get_position(pipe_ctx->stream_res.tg, &position); tg 339 drivers/gpu/drm/amd/display/dc/core/dc_debug.c pipe_ctx->stream_res.tg->inst, tg 273 drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c struct timing_generator *tg) tg 278 drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c if (!tg->funcs->is_blanked) tg 281 drivers/gpu/drm/amd/display/dc/core/dc_hw_sequencer.c if (tg->funcs->is_blanked(tg)) tg 2350 drivers/gpu/drm/amd/display/dc/core/dc_link.c res_ctx.pipe_ctx[i].stream_res.tg->inst + tg 2687 drivers/gpu/drm/amd/display/dc/core/dc_link.c pipe_ctx->stream_res.tg->inst, tg 2766 drivers/gpu/drm/amd/display/dc/core/dc_link.c if (pipe_ctx->stream_res.tg->funcs->set_test_pattern) tg 2767 drivers/gpu/drm/amd/display/dc/core/dc_link.c pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg, tg 3106 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c if (pipe_ctx->stream_res.tg->funcs->set_test_pattern) tg 3107 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg, tg 3146 
drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c if (pipe_ctx->stream_res.tg->funcs->set_test_pattern) tg 3147 drivers/gpu/drm/amd/display/dc/core/dc_link_dp.c pipe_ctx->stream_res.tg->funcs->set_test_pattern(pipe_ctx->stream_res.tg, tg 433 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c DC_LOG_DSC("Setting optc DSC config for tg instance %d:", pipe_ctx->stream_res.tg->inst); tg 435 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c pipe_ctx->stream_res.tg->funcs->set_dsc_config(pipe_ctx->stream_res.tg, tg 441 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c pipe_ctx->stream_res.tg->funcs->set_dsc_config( tg 442 drivers/gpu/drm/amd/display/dc/core/dc_link_hwss.c pipe_ctx->stream_res.tg, tg 1214 drivers/gpu/drm/amd/display/dc/core/dc_resource.c split_pipe->stream_res.tg = pool->timing_generators[i]; tg 1289 drivers/gpu/drm/amd/display/dc/core/dc_resource.c free_pipe->stream_res.tg = tail_pipe->stream_res.tg; tg 1616 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_ctx->stream_res.tg = pool->timing_generators[i]; tg 1886 drivers/gpu/drm/amd/display/dc/core/dc_resource.c pipe_ctx->stream_res.tg = pool->timing_generators[tg_inst]; tg 1952 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL) tg 1992 drivers/gpu/drm/amd/display/dc/core/dc_resource.c context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst; tg 2749 drivers/gpu/drm/amd/display/dc/core/dc_resource.c struct timing_generator *tg = core_dc->res_pool->timing_generators[0]; tg 2754 drivers/gpu/drm/amd/display/dc/core/dc_resource.c if (!tg->funcs->validate_timing(tg, &stream->timing)) tg 493 drivers/gpu/drm/amd/display/dc/core/dc_stream.c struct timing_generator *tg = res_ctx->pipe_ctx[i].stream_res.tg; tg 498 drivers/gpu/drm/amd/display/dc/core/dc_stream.c return tg->funcs->get_frame_count(tg); tg 552 drivers/gpu/drm/amd/display/dc/core/dc_stream.c struct timing_generator *tg = res_ctx->pipe_ctx[i].stream_res.tg; tg 557 drivers/gpu/drm/amd/display/dc/core/dc_stream.c tg->funcs->get_scanoutpos(tg, tg 520 drivers/gpu/drm/amd/display/dc/dce/dce_clk_mgr.c cfg->pipe_idx = pipe_ctx->stream_res.tg->inst; tg 56 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c if (lock && pipe->stream_res.tg->funcs->is_blanked && tg 57 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c pipe->stream_res.tg->funcs->is_blanked(pipe->stream_res.tg)) tg 60 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c val = REG_GET_4(BLND_V_UPDATE_LOCK[pipe->stream_res.tg->inst], tg 71 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c REG_SET_2(BLND_V_UPDATE_LOCK[pipe->stream_res.tg->inst], val, tg 76 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c REG_SET_2(BLND_V_UPDATE_LOCK[pipe->stream_res.tg->inst], val, tg 82 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c uint32_t value = REG_READ(CRTC_H_BLANK_START_END[pipe->stream_res.tg->inst]); tg 83 drivers/gpu/drm/amd/display/dc/dce/dce_hwseq.c REG_WRITE(CRTC_H_BLANK_START_END[pipe->stream_res.tg->inst], value); tg 661 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct timing_generator *tg = pipe_ctx->stream_res.tg; tg 684 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg->funcs->set_early_control(tg, early_control); tg 1184 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->inst + 1); tg 1196 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c uint32_t color_value = MAX_TG_COLOR_VALUE * (4 - pipe_ctx->stream_res.tg->inst) / 4; tg 1251 
drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (pipe_ctx->stream_res.tg->funcs->set_overscan_blank_color) { tg 1260 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_overscan_blank_color( tg 1261 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg, tg 1284 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_blank_color( tg 1285 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg, tg 1292 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, true); tg 1302 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->program_timing( tg 1303 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg, tg 1314 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (false == pipe_ctx->stream_res.tg->funcs->enable_crtc( tg 1315 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg)) { tg 1374 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (pipe_ctx->stream_res.tg->funcs->set_drr) tg 1375 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_drr( tg 1376 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg, ¶ms); tg 1381 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (pipe_ctx->stream_res.tg->funcs->set_static_screen_control) tg 1382 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_static_screen_control( tg 1383 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg, event_triggers); tg 1388 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->inst); tg 1499 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct timing_generator *tg; tg 1503 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg = dc->res_pool->timing_generators[i]; tg 1505 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (tg->funcs->disable_vga) tg 1506 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg->funcs->disable_vga(tg); tg 1738 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->set_drr( tg 1739 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx[i]->stream_res.tg, ¶ms); tg 1742 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->set_static_screen_control( tg 1743 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx[i]->stream_res.tg, tg 1757 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->get_position(pipe_ctx[i]->stream_res.tg, position); tg 1783 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs-> tg 1784 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c set_static_screen_control(pipe_ctx[i]->stream_res.tg, value); tg 1868 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c params.inst = pipe_ctx->stream_res.tg->inst; tg 1927 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx_old->stream_res.tg->funcs->set_blank(pipe_ctx_old->stream_res.tg, true); tg 1928 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (!hwss_wait_for_blank_complete(pipe_ctx_old->stream_res.tg)) { tg 1932 
drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx_old->stream_res.tg->funcs->disable_crtc(pipe_ctx_old->stream_res.tg); tg 2177 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c dce_set_blender_mode(dc->hwseq, pipe_ctx->stream_res.tg->inst, blender_mode); tg 2178 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, blank_target); tg 2232 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->is_stereo_left_eye) { tg 2234 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c !pipe_ctx->stream_res.tg->funcs->is_stereo_left_eye(pipe_ctx->stream_res.tg); tg 2246 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct timing_generator *tg) tg 2257 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (!tg->funcs->is_counter_moving(tg)) { tg 2262 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c if (tg->funcs->did_triggered_reset_occur(tg)) { tg 2271 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg->funcs->wait_for_state(tg, CRTC_STATE_VACTIVE); tg 2272 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg->funcs->wait_for_state(tg, CRTC_STATE_VBLANK); tg 2298 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c gsl_params.gsl_master = grouped_pipes[0]->stream_res.tg->inst; tg 2301 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->setup_global_swap_lock( tg 2302 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg, &gsl_params); tg 2308 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->enable_reset_trigger( tg 2309 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg, tg 2314 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[i]->stream_res.tg); tg 2315 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->disable_reset_trigger( tg 2316 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg); tg 2323 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->tear_down_global_swap_lock(grouped_pipes[i]->stream_res.tg); tg 2341 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->setup_global_swap_lock( tg 2342 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg, &gsl_params); tg 2347 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->enable_crtc_reset( tg 2348 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg, tg 2354 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[i]->stream_res.tg); tg 2357 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->tear_down_global_swap_lock(grouped_pipes[i]->stream_res.tg); tg 2404 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c struct timing_generator *tg = dc->res_pool->timing_generators[i]; tg 2406 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg->funcs->disable_vga(tg); tg 2410 drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c tg->funcs->set_blank(tg, true); tg 2411 
drivers/gpu/drm/amd/display/dc/dce110/dce110_hw_sequencer.c hwss_wait_for_blank_complete(tg); tg 822 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pixel_clk_params->controller_id = pipe_ctx->stream_res.tg->inst + 1; tg 1061 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg = pool->timing_generators[underlay_idx]; tg 1076 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg->inst, tg 1084 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg->funcs->program_timing(pipe_ctx->stream_res.tg, tg 1093 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg->funcs->enable_advanced_request( tg 1094 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg, tg 1106 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg->funcs->set_blank_color( tg 1107 drivers/gpu/drm/amd/display/dc/dce110/dce110_resource.c pipe_ctx->stream_res.tg, tg 66 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 92 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg) tg 97 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 100 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr); tg 106 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 110 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 113 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c regval = dm_read_reg(tg->ctx, address); tg 116 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, address, regval); tg 123 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_timing_generator_enable_crtc(struct timing_generator *tg) tg 127 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 140 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_MODE), value); tg 144 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_LOCK), value); tg 146 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c result = tg->bp->funcs->enable_crtc(tg->bp, tg110->controller_id, true); tg 152 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 155 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 157 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, addr); tg 175 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value); tg 189 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c static void disable_stereo(struct timing_generator *tg) tg 191 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 199 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr); tg 217 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg->funcs->wait_for_vblank(tg); tg 218 
drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg->funcs->wait_for_vactive(tg); tg 222 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value); tg 224 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c addr = tg->regs[IDX_CRTC_STEREO_CONTROL]; tg 225 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value); tg 232 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_timing_generator_disable_crtc(struct timing_generator *tg) tg 236 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 238 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c result = tg->bp->funcs->enable_crtc(tg->bp, tg110->controller_id, false); tg 256 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 260 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 262 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c regval = dm_read_reg(tg->ctx, tg 272 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, tg 282 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 288 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 306 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_timing_generator_apply_front_porch_workaround(tg, &patched_crtc_timing); tg 343 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c result = tg->bp->funcs->program_crtc_timing(tg->bp, &bp_params); tg 345 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c program_horz_count_by_2(tg, &patched_crtc_timing); tg 347 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg110->base.funcs->enable_advanced_request(tg, true, &patched_crtc_timing); tg 367 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 374 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 379 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c v_total_min = dm_read_reg(tg->ctx, addr); tg 382 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c v_total_max = dm_read_reg(tg->ctx, addr); tg 385 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c v_total_cntl = dm_read_reg(tg->ctx, addr); tg 461 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, v_total_min); tg 464 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, v_total_max); tg 467 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, v_total_cntl); tg 471 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 474 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 479 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c static_screen_cntl = dm_read_reg(tg->ctx, addr); tg 491 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, static_screen_cntl); tg 508 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t 
dce110_timing_generator_get_vblank_counter(struct timing_generator *tg) tg 510 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 512 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, addr); tg 529 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_timing_generator_get_position(struct timing_generator *tg, tg 533 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 535 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_STATUS_POSITION)); tg 547 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_NOM_VERT_POSITION)); tg 566 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 572 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 575 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, tg 586 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg, &position); tg 597 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 607 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 609 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dc_context *ctx = tg->ctx; tg 700 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 707 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dc_context *ctx = tg->ctx; tg 710 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1110 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 1117 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1168 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_timing_generator_wait_for_vblank(struct timing_generator *tg) tg 1174 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c while (dce110_timing_generator_is_in_vertical_blank(tg)) { tg 1175 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c if (!dce110_timing_generator_is_counter_moving(tg)) { tg 1181 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c while (!dce110_timing_generator_is_in_vertical_blank(tg)) { tg 1182 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c if (!dce110_timing_generator_is_counter_moving(tg)) { tg 1192 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_timing_generator_wait_for_vactive(struct timing_generator *tg) tg 1194 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c while (dce110_timing_generator_is_in_vertical_blank(tg)) { tg 1195 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c if (!dce110_timing_generator_is_counter_moving(tg)) { tg 1215 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 1219 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1223 
drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, address); tg 1232 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c gsl_params->gsl_master == tg->inst, tg 1248 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmDCP_GSL_CONTROL), value); tg 1266 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmDCIO_GSL0_CNTL), value); tg 1272 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value_crtc_vtotal = dm_read_reg(tg->ctx, tg 1285 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_GSL_WINDOW), 0); tg 1293 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, address, value); tg 1298 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, address); tg 1309 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, address, value); tg 1313 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg) tg 1320 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1351 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value_crtc_vtotal = dm_read_reg(tg->ctx, tg 1365 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, address, value); tg 1381 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, address, value); tg 1395 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_timing_generator_is_counter_moving(struct timing_generator *tg) tg 1399 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg->funcs->get_position(tg, &position1); tg 1400 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg->funcs->get_position(tg, &position2); tg 1410 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 1414 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1416 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, addr); tg 1468 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value); tg 1472 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_timing_generator_set_lock_master(struct timing_generator *tg, tg 1475 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dc_context *ctx = tg->ctx; tg 1476 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1490 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg, tg 1497 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg); tg 1501 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t pol_value = dm_read_reg(tg->ctx, tg 1515 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL)); tg 1554 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL), value); tg 1558 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, 
tg 1575 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
tg 1579 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg,
tg 1586 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1599 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL));
tg 1626 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL), value);
tg 1632 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
tg 1649 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
tg 1651 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL));
tg 1666 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL));
tg 1678 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL), value);
tg 1680 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
tg 1697 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
tg 1701 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_MODE));
tg 1708 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_MASTER_UPDATE_MODE), value);
tg 1711 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg)
tg 1714 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1716 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL));
tg 1728 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_FORCE_COUNT_NOW_CNTL), value);
tg 1730 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL));
tg 1742 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_VERT_SYNC_CONTROL), value);
tg 1745 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL));
tg 1762 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_TRIGB_CNTL), value);
tg 1775 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg)
tg 1777 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1778 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx,
tg 1780 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value1 = dm_read_reg(tg->ctx,
tg 1798 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg)
tg 1803 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1827 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr);
tg 1835 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 1847 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct timing_generator *tg,
tg 1850 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dc_context *ctx = tg->ctx;
tg 1853 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1894 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_tg_program_blank_color(struct timing_generator *tg,
tg 1897 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1899 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 1917 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 1920 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 1923 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_tg_set_overscan_color(struct timing_generator *tg,
tg 1926 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dc_context *ctx = tg->ctx;
tg 1929 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1953 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_tg_program_timing(struct timing_generator *tg,
tg 1963 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_timing_generator_program_timing_generator(tg, timing);
tg 1965 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_timing_generator_program_blanking(tg, timing);
tg 1968 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_tg_is_blanked(struct timing_generator *tg)
tg 1970 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1971 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, CRTC_REG(mmCRTC_BLANK_CONTROL));
tg 1985 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_tg_set_blank(struct timing_generator *tg,
tg 1988 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1997 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_DOUBLE_BUFFER_CONTROL), value);
tg 2007 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_BLANK_CONTROL), value);
tg 2010 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_BLANK_CONTROL), 0);
tg 2013 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_tg_validate_timing(struct timing_generator *tg,
tg 2016 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c return dce110_timing_generator_validate_timing(tg, timing, SIGNAL_TYPE_NONE);
tg 2019 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_tg_wait_for_state(struct timing_generator *tg,
tg 2024 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_timing_generator_wait_for_vblank(tg);
tg 2028 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_timing_generator_wait_for_vactive(tg);
tg 2036 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c void dce110_tg_set_colors(struct timing_generator *tg,
tg 2041 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_tg_program_blank_color(tg, blank_color);
tg 2043 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dce110_tg_set_overscan_color(tg, overscan_color);
tg 2049 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_arm_vert_intr(struct timing_generator *tg, uint8_t width)
tg 2051 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 2057 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg->funcs->get_scanoutpos(
tg 2058 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c tg,
tg 2080 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, CRTC_REG(mmCRTC_VERTICAL_INTERRUPT0_POSITION), val);
tg 2085 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c static bool dce110_is_tg_enabled(struct timing_generator *tg)
tg 2090 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 2093 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr);
tg 2099 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_configure_crc(struct timing_generator *tg,
tg 2105 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 2108 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c if (!dce110_is_tg_enabled(tg))
tg 2114 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, cntl_addr, 0);
tg 2129 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 2140 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 2151 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 2162 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 2171 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c dm_write_reg(tg->ctx, cntl_addr, value);
tg 2176 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c bool dce110_get_crc(struct timing_generator *tg,
tg 2182 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 2185 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr);
tg 2193 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr);
tg 2198 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.c value = dm_read_reg(tg->ctx, addr);
tg 117 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h #define DCE110TG_FROM_TG(tg)\
tg 118 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h container_of(tg, struct dce110_timing_generator, base)
tg 121 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct dce110_timing_generator *tg,
tg 128 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
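The two header entries at dce110_timing_generator.h:117-118 explain every DCE110TG_FROM_TG(tg) line in this index: the DCE110-specific object embeds the generic struct timing_generator as its base member, and container_of() recovers the outer object from the base pointer each hook receives. A self-contained sketch of the pattern (the extra field here is illustrative, not the driver's real layout):

#include <stddef.h>

/* Userspace restatement of the kernel's container_of(); the kernel
 * macro additionally type-checks ptr against the named member. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct timing_generator { int inst; };

/* The derived object embeds the base, which is exactly the layout
 * DCE110TG_FROM_TG() assumes. */
struct dce110_timing_generator {
	struct timing_generator base;
	int controller_id;              /* illustrative extra state */
};

#define DCE110TG_FROM_TG(tg) \
	container_of(tg, struct dce110_timing_generator, base)

/* Every hook receives the base pointer and downcasts on entry: */
static inline int controller_of(struct timing_generator *tg)
{
	return DCE110TG_FROM_TG(tg)->controller_id;
}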
tg 136 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 140 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_timing_generator_enable_crtc(struct timing_generator *tg);
tg 141 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_timing_generator_disable_crtc(struct timing_generator *tg);
tg 144 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 151 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg);
tg 154 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 158 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_timing_generator_is_counter_moving(struct timing_generator *tg);
tg 161 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_timing_generator_wait_for_vblank(struct timing_generator *tg);
tg 164 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_timing_generator_wait_for_vactive(struct timing_generator *tg);
tg 170 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 175 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg);
tg 179 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 185 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 190 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg);
tg 194 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg);
tg 198 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_timing_generator_disable_vga(struct timing_generator *tg);
tg 202 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 208 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 212 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 221 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 229 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 233 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 237 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 244 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg,
tg 248 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_timing_generator_set_lock_master(struct timing_generator *tg,
tg 251 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_tg_program_blank_color(struct timing_generator *tg,
tg 254 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_tg_set_overscan_color(struct timing_generator *tg,
tg 257 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_tg_program_timing(struct timing_generator *tg,
tg 266 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_tg_is_blanked(struct timing_generator *tg);
tg 268 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_tg_set_blank(struct timing_generator *tg,
tg 271 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_tg_validate_timing(struct timing_generator *tg,
tg 274 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_tg_wait_for_state(struct timing_generator *tg,
tg 277 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h void dce110_tg_set_colors(struct timing_generator *tg,
tg 282 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h struct timing_generator *tg, uint8_t width);
tg 284 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_configure_crc(struct timing_generator *tg,
tg 287 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator.h bool dce110_get_crc(struct timing_generator *tg,
tg 42 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c tg->ctx->logger
tg 53 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static bool dce110_timing_generator_v_enable_crtc(struct timing_generator *tg)
tg 65 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx,
tg 70 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, mmCRTCV_MASTER_UPDATE_MODE, value);
tg 75 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx,
tg 81 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static bool dce110_timing_generator_v_disable_crtc(struct timing_generator *tg)
tg 85 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c value = dm_read_reg(tg->ctx,
tg 91 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx,
tg 100 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_blank_crtc(struct timing_generator *tg)
tg 103 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 117 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, addr, value);
tg 120 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_unblank_crtc(struct timing_generator *tg)
tg 123 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 137 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, addr, value);
tg 141 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg)
tg 148 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c value = dm_read_reg(tg->ctx, addr);
tg 153 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static bool dce110_timing_generator_v_is_counter_moving(struct timing_generator *tg)
tg 161 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c value = dm_read_reg(tg->ctx, mmCRTCV_STATUS_POSITION);
tg 173 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c value = dm_read_reg(tg->ctx, mmCRTCV_STATUS_POSITION);
tg 191 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_wait_for_vblank(struct timing_generator *tg)
tg 197 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c while (dce110_timing_generator_v_is_in_vertical_blank(tg)) {
tg 198 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c if (!dce110_timing_generator_v_is_counter_moving(tg)) {
tg 204 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c while (!dce110_timing_generator_v_is_in_vertical_blank(tg)) {
tg 205 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c if (!dce110_timing_generator_v_is_counter_moving(tg)) {
tg 215 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_wait_for_vactive(struct timing_generator *tg)
tg 217 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c while (dce110_timing_generator_v_is_in_vertical_blank(tg)) {
tg 218 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c if (!dce110_timing_generator_v_is_counter_moving(tg)) {
tg 225 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_wait_for_state(struct timing_generator *tg,
tg 230 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_v_wait_for_vblank(tg);
tg 234 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_v_wait_for_vactive(tg);
tg 243 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 254 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct dc_context *ctx = tg->ctx;
tg 385 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 390 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 424 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, addr, value);
tg 427 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_set_blank(struct timing_generator *tg,
tg 431 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_v_blank_crtc(tg);
tg 433 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_v_unblank_crtc(tg);
tg 436 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_program_timing(struct timing_generator *tg,
tg 446 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_program_timing_generator(tg, timing);
tg 448 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_v_program_blanking(tg, timing);
tg 452 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 456 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 474 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, addr, value);
tg 478 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 481 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct dc_context *ctx = tg->ctx;
tg 523 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_tg_v_program_blank_color(struct timing_generator *tg,
tg 527 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 545 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, addr, value);
tg 548 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, addr, value);
tg 551 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_set_overscan_color(struct timing_generator *tg,
tg 554 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct dc_context *ctx = tg->ctx;
tg 580 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static void dce110_timing_generator_v_set_colors(struct timing_generator *tg,
tg 585 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_tg_v_program_blank_color(tg, blank_color);
tg 587 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dce110_timing_generator_v_set_overscan_color(tg, overscan_color);
tg 591 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 597 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c regval = dm_read_reg(tg->ctx, address);
tg 600 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c dm_write_reg(tg->ctx, address, regval);
tg 603 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c static uint32_t dce110_timing_generator_v_get_vblank_counter(struct timing_generator *tg)
tg 606 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 614 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg)
tg 621 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 629 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg,
tg 637 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg)
tg 644 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg)
tg 651 drivers/gpu/drm/amd/display/dc/dce110/dce110_timing_generator_v.c struct timing_generator *tg)
tg 86 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg)
tg 89 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 91 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 102 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 111 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 114 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg,
tg 128 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c bool dce120_tg_validate_timing(struct timing_generator *tg,
tg 131 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c return dce120_timing_generator_validate_timing(tg, timing, SIGNAL_TYPE_NONE);
tg 136 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c bool dce120_timing_generator_enable_crtc(struct timing_generator *tg)
tg 139 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 151 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c result = tg->bp->funcs->enable_crtc(tg->bp, tg110->controller_id, true);
tg 157 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 160 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 170 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg)
tg 172 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 174 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
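Unlike the DCE110 path, which programs the CRTC registers directly, the dce120_timing_generator_enable_crtc entry at line 151 above delegates to the VBIOS command-table wrapper reachable through tg->bp. A trimmed sketch of that indirection, with the types reduced to the fields the one call touches (the real struct dc_bios and its funcs table are much larger):

#include <stdbool.h>

struct dc_bios;

/* Reduced view of the BIOS function table; only the hook used at
 * dce120_timing_generator.c:151 is shown. */
struct bios_funcs {
	bool (*enable_crtc)(struct dc_bios *bp, int controller_id, bool enable);
};

struct dc_bios { const struct bios_funcs *funcs; };

static bool enable_crtc_via_bios(struct dc_bios *bp, int controller_id)
{
	/* true enables the CRTC; the same entry point disables with false */
	return bp->funcs->enable_crtc(bp, controller_id, true);
}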
tg 185 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 188 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 190 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 201 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 210 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_timing_generator_wait_for_vblank(struct timing_generator *tg)
tg 216 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c while (dce120_timing_generator_is_in_vertical_blank(tg)) {
tg 217 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c if (!tg->funcs->is_counter_moving(tg)) {
tg 223 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c while (!dce120_timing_generator_is_in_vertical_blank(tg)) {
tg 224 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c if (!tg->funcs->is_counter_moving(tg)) {
tg 232 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_timing_generator_wait_for_vactive(struct timing_generator *tg)
tg 234 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c while (dce120_timing_generator_is_in_vertical_blank(tg)) {
tg 235 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c if (!tg->funcs->is_counter_moving(tg)) {
tg 246 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 249 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 251 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dm_read_reg_soc15(tg->ctx,
tg 261 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_GSL_WINDOW, tg110->offsets.crtc, 0);
tg 266 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c FD(DCP0_DCP_GSL_CONTROL__DCP_GSL_MASTER_EN), gsl_params->gsl_master == tg->inst,
tg 283 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg)
tg 285 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 304 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 308 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 313 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 351 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg)
tg 353 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 371 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg)
tg 373 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 375 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 387 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_timing_generator_disable_vga(struct timing_generator *tg)
tg 391 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 416 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c value = dm_read_reg_soc15(tg->ctx, mmD1VGA_CONTROL, offset);
tg 424 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dm_write_reg_soc15(tg->ctx, mmD1VGA_CONTROL, offset, value);
tg 429 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 441 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 489 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 492 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 502 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 505 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 514 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 519 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 529 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 544 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 548 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 602 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_timing_generator_get_position(struct timing_generator *tg,
tg 606 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 609 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 624 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 636 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 642 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 646 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 658 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg, &position);
tg 665 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 669 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 674 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 696 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dm_write_reg_soc15(tg->ctx,
tg 702 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_tg_program_blank_color(struct timing_generator *tg,
tg 705 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 715 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 719 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 725 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_tg_set_overscan_color(struct timing_generator *tg,
tg 728 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 737 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c static void dce120_tg_program_timing(struct timing_generator *tg,
tg 747 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dce110_timing_generator_program_timing_generator(tg, timing);
tg 749 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dce120_timing_generator_program_blanking(tg, timing);
tg 752 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c bool dce120_tg_is_blanked(struct timing_generator *tg)
tg 754 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 756 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->ctx,
tg 773 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_tg_set_blank(struct timing_generator *tg,
tg 776 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 785 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_BLANK_CONTROL,
tg 789 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c bool dce120_tg_validate_timing(struct timing_generator *tg,
tg 792 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_tg_wait_for_state(struct timing_generator *tg,
tg 797 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dce120_timing_generator_wait_for_vblank(tg);
tg 801 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dce120_timing_generator_wait_for_vactive(tg);
tg 809 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c void dce120_tg_set_colors(struct timing_generator *tg,
tg 814 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dce120_tg_program_blank_color(tg, blank_color);
tg 817 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dce120_tg_set_overscan_color(tg, overscan_color);
tg 821 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 824 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 832 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 839 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dc_context *ctx = tg->ctx;
tg 841 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1093 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct timing_generator *tg,
tg 1096 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1099 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg->funcs->get_scanoutpos(
tg 1100 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c tg,
tg 1118 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c static bool dce120_is_tg_enabled(struct timing_generator *tg)
tg 1120 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1123 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CONTROL,
tg 1131 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c static bool dce120_configure_crc(struct timing_generator *tg,
tg 1134 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1137 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c if (!dce120_is_tg_enabled(tg))
tg 1141 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c dm_write_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC_CNTL,
tg 1177 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c static bool dce120_get_crc(struct timing_generator *tg, uint32_t *r_cr,
tg 1180 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 1183 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC_CNTL,
tg 1191 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC0_DATA_RG,
tg 1196 drivers/gpu/drm/amd/display/dc/dce120/dce120_timing_generator.c value = dm_read_reg_soc15(tg->ctx, mmCRTC0_CRTC_CRC0_DATA_B,
tg 87 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c static void program_pix_dur(struct timing_generator *tg, uint32_t pix_clk_100hz)
tg 91 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c + DCE110TG_FROM_TG(tg)->offsets.dmif;
tg 92 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 105 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 108 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c static void program_timing(struct timing_generator *tg,
tg 118 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c program_pix_dur(tg, timing->pix_clk_100hz);
tg 120 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c dce110_tg_program_timing(tg, timing, 0, 0, 0, 0, 0, use_vbios);
tg 124 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c struct timing_generator *tg,
tg 128 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c struct dce110_timing_generator *tg110 = DCE110TG_FROM_TG(tg);
tg 130 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c uint32_t value = dm_read_reg(tg->ctx, addr);
tg 182 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.c dm_write_reg(tg->ctx, addr, value);
tg 34 drivers/gpu/drm/amd/display/dc/dce80/dce80_timing_generator.h struct dce110_timing_generator *tg,
tg 315 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = pool->timing_generators[i];
tg 318 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c optc1_read_otg_state(DCN10TG_FROM_TG(tg), &s);
tg 326 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->inst,
tg 349 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->clear_optc_underflow(tg);
tg 427 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = pipe_ctx->stream_res.tg;
tg 429 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->is_optc_underflow_occurred(tg)) {
tg 430 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->clear_optc_underflow(tg);
tg 705 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg)
tg 713 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c underflow = tg->funcs->is_optc_underflow_occurred(tg);
tg 724 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->set_blank_data_double_buffer)
tg 725 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->set_blank_data_double_buffer(tg, true);
tg 727 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->is_optc_underflow_occurred(tg) && !underflow)
tg 728 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->clear_optc_underflow(tg);
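The dce110_configure_crc/dce110_get_crc and dce120 CRC entries above follow one protocol: refuse unless the timing generator is running, program the CRC control window, then read the per-channel result registers. An illustrative caller under assumed, simplified signatures (the real hooks take a struct crc_params and are reached through tg->funcs, as the timing_generator.h entries later in this index show):

#include <stdbool.h>
#include <stdint.h>

struct timing_generator;

/* Assumed, trimmed hook table; mirrors the is_tg_enabled,
 * configure_crc, and get_crc entries listed for timing_generator.h. */
struct tg_funcs {
	bool (*is_tg_enabled)(struct timing_generator *tg);
	bool (*configure_crc)(struct timing_generator *tg, const void *params);
	bool (*get_crc)(struct timing_generator *tg,
			uint32_t *r_cr, uint32_t *g_y, uint32_t *b_cb);
};

struct timing_generator { const struct tg_funcs *funcs; };

static bool read_frame_crc(struct timing_generator *tg, const void *params,
			   uint32_t crc[3])
{
	/* Both the DCE110 and DCE120 configure paths bail if the CRTC
	 * is off, so check before programming anything. */
	if (!tg->funcs->is_tg_enabled(tg))
		return false;
	if (!tg->funcs->configure_crc(tg, params))
		return false;
	return tg->funcs->get_crc(tg, &crc[0], &crc[1], &crc[2]);
}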
tg 752 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, true);
tg 762 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->program_timing(
tg 763 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg,
tg 776 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c inst_offset = reg_offsets[pipe_ctx->stream_res.tg->inst].fmt;
tg 787 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (pipe_ctx->stream_res.tg->funcs->set_blank_color)
tg 788 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_blank_color(
tg 789 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg,
tg 792 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (pipe_ctx->stream_res.tg->funcs->is_blanked &&
tg 793 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c !pipe_ctx->stream_res.tg->funcs->is_blanked(pipe_ctx->stream_res.tg)) {
tg 794 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_blank(pipe_ctx->stream_res.tg, true);
tg 795 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c hwss_wait_for_blank_complete(pipe_ctx->stream_res.tg);
tg 796 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c false_optc_underflow_wa(dc, pipe_ctx->stream, pipe_ctx->stream_res.tg);
tg 800 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (false == pipe_ctx->stream_res.tg->funcs->enable_crtc(pipe_ctx->stream_res.tg)) {
tg 855 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->disable_crtc(pipe_ctx->stream_res.tg);
tg 857 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, false);
tg 858 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (pipe_ctx->stream_res.tg->funcs->set_drr)
tg 859 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_drr(
tg 860 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg, NULL);
tg 872 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->pipe_idx, pipe_ctx->stream_res.tg->inst);
tg 1089 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = dc->res_pool->timing_generators[i];
tg 1102 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->is_tg_enabled(tg)) {
tg 1104 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c dc->hwss.init_blank(dc, tg);
tg 1105 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->lock(tg);
tg 1107 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->lock(tg);
tg 1108 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->set_blank(tg, true);
tg 1109 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c hwss_wait_for_blank_complete(tg);
tg 1126 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = dc->res_pool->timing_generators[i];
tg 1137 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->is_tg_enabled(
tg 1138 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg))
tg 1146 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg = tg;
tg 1163 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->is_tg_enabled(tg))
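The dcn10_hw_sequencer.c entries at lines 1102-1109 encode the hardware-init decision: a timing generator the VBIOS left running is blanked through the init_blank hook before being locked, while a disabled one is locked first and then blanked. A sketch of that branch, with hypothetical flat helpers standing in for the dc->hwss and tg->funcs indirections:

#include <stdbool.h>

struct dc;
struct timing_generator;

/* Hypothetical stand-ins for dc->hwss.init_blank and the tg->funcs hooks. */
extern bool tg_is_enabled(struct timing_generator *tg);
extern void init_blank(struct dc *dc, struct timing_generator *tg);
extern void tg_lock(struct timing_generator *tg);
extern void tg_set_blank(struct timing_generator *tg, bool blank);
extern void wait_for_blank_complete(struct timing_generator *tg);

static void quiesce_tg(struct dc *dc, struct timing_generator *tg)
{
	if (tg_is_enabled(tg)) {
		/* Already scanning out: blank before locking so the lock
		 * never freezes live pixels on screen. */
		init_blank(dc, tg);
		tg_lock(tg);
	} else {
		tg_lock(tg);
		tg_set_blank(tg, true);
		wait_for_blank_complete(tg);
	}
}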
tg 1164 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->unlock(tg);
tg 1168 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg = NULL;
tg 1171 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->tg_init(tg);
tg 1520 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe->stream_res.tg->funcs->lock(pipe->stream_res.tg);
tg 1522 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe->stream_res.tg->funcs->unlock(pipe->stream_res.tg);
tg 1530 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg)
tg 1541 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (!tg->funcs->is_counter_moving(tg)) {
tg 1546 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->did_triggered_reset_occur(tg)) {
tg 1555 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->wait_for_state(tg, CRTC_STATE_VACTIVE);
tg 1556 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->wait_for_state(tg, CRTC_STATE_VBLANK);
tg 1577 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->enable_reset_trigger(
tg 1578 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[i]->stream_res.tg,
tg 1579 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[0]->stream_res.tg->inst);
tg 1587 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[1]->stream_res.tg);
tg 1589 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->disable_reset_trigger(
tg 1590 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[i]->stream_res.tg);
tg 1605 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (grouped_pipes[i]->stream_res.tg->funcs->enable_crtc_reset)
tg 1606 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[i]->stream_res.tg->funcs->enable_crtc_reset(
tg 1607 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c grouped_pipes[i]->stream_res.tg,
tg 1614 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c wait_for_reset_trigger_to_occur(dc_ctx, grouped_pipes[i]->stream_res.tg);
tg 2320 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c hubp->funcs->hubp_vtg_sel(hubp, pipe_ctx->stream_res.tg->inst);
tg 2432 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (stream_res->tg->funcs->set_blank_color)
tg 2433 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c stream_res->tg->funcs->set_blank_color(
tg 2434 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c stream_res->tg,
tg 2438 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (stream_res->tg->funcs->set_blank)
tg 2439 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c stream_res->tg->funcs->set_blank(stream_res->tg, blank);
tg 2441 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c stream_res->abm->funcs->set_pipe(stream_res->abm, stream_res->tg->inst + 1);
tg 2447 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (stream_res->tg->funcs->set_blank)
tg 2448 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c stream_res->tg->funcs->set_blank(stream_res->tg, blank);
tg 2505 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->program_global_sync(
tg 2506 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg,
tg 2512 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->set_vtg_params(
tg 2513 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing);
tg 2557 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg;
tg 2568 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg = top_pipe_to_program->stream_res.tg;
tg 2606 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (old_pipe_ctx->stream_res.tg == tg &&
tg 2613 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg != old_pipe_ctx->stream_res.tg) &&
tg 2615 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c old_pipe_ctx->stream_res.tg == tg) {
tg 2638 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c !pipe_ctx->plane_state || !tg->funcs->is_tg_enabled(tg))
tg 2653 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c false_optc_underflow_wa(dc, stream, tg);
tg 2768 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->set_drr(
tg 2769 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx[i]->stream_res.tg, &params);
tg 2771 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->set_static_screen_control(
tg 2772 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx[i]->stream_res.tg,
tg 2786 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->get_position(pipe_ctx[i]->stream_res.tg, position);
tg 2803 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx[i]->stream_res.tg->funcs->
tg 2804 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c set_static_screen_control(pipe_ctx[i]->stream_res.tg, value);
tg 2863 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->program_stereo(
tg 2864 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg,
tg 2925 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = pipe_ctx->stream_res.tg;
tg 2940 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->is_stereo_left_eye) {
tg 2942 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c !tg->funcs->is_stereo_left_eye(pipe_ctx->stream_res.tg);
tg 3109 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg;
tg 3114 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg = pipe_ctx->stream_res.tg;
tg 3121 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c !tg->funcs->is_tg_enabled(tg))
tg 3125 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->lock(tg);
tg 3127 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->unlock(tg);
tg 3193 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = pipe_ctx->stream_res.tg;
tg 3201 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->setup_vertical_interrupt0(tg, start_line, end_line);
tg 3204 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c pipe_ctx->stream_res.tg->funcs->setup_vertical_interrupt1(
tg 3205 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg,
tg 3212 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c struct timing_generator *tg = pipe_ctx->stream_res.tg;
tg 3220 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c if (tg->funcs->setup_vertical_interrupt2)
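A pattern repeats throughout the hw_sequencer entries above: optional hooks such as set_drr, set_blank_data_double_buffer, and setup_vertical_interrupt2 are NULL-checked before every call, because not every ASIC generation populates them in its funcs table. A generic sketch of the guard, trimmed to the one hook shown:

struct drr_params;
struct timing_generator;

struct tg_funcs {
	/* Optional: older generations may leave this NULL. */
	void (*set_drr)(struct timing_generator *tg,
			const struct drr_params *params);
};

struct timing_generator { const struct tg_funcs *funcs; };

static void maybe_set_drr(struct timing_generator *tg,
			  const struct drr_params *params)
{
	if (tg->funcs->set_drr)
		tg->funcs->set_drr(tg, params);
}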
tg 3221 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c tg->funcs->setup_vertical_interrupt2(tg, start_line);
tg 427 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c struct timing_generator *tg = pool->timing_generators[i];
tg 431 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c optc1_read_otg_state(DCN10TG_FROM_TG(tg), &s);
tg 439 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c tg->inst,
tg 494 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c struct timing_generator *tg = pool->timing_generators[i];
tg 497 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c optc1_read_otg_state(DCN10TG_FROM_TG(tg), &s);
tg 500 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c tg->funcs->clear_optc_underflow(tg);
tg 1233 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.c bool optc1_is_matching_timing(struct timing_generator *tg,
tg 1239 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.c if (tg == NULL || otg_timing == NULL)
tg 1242 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.c optc1_read_otg_state(DCN10TG_FROM_TG(tg), &s);
tg 31 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.h #define DCN10TG_FROM_TG(tg)\
tg 32 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.h container_of(tg, struct optc, base)
tg 551 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_optc.h struct timing_generator *tg,
tg 993 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c pixel_clk_params->controller_id = pipe_ctx->stream_res.tg->inst + 1;
tg 1106 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_resource.c idle_pipe->stream_res.tg = head_pipe->stream_res.tg;
tg 196 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg)
tg 210 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->get_otg_active_size(tg,
tg 215 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->get_optc_source(tg, &num_opps, &opp_id_src0, &opp_id_src1);
tg 550 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_odm_combine(
tg 551 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg,
tg 558 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, true);
tg 568 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->program_timing(
tg 569 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg,
tg 590 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (false == pipe_ctx->stream_res.tg->funcs->enable_crtc(pipe_ctx->stream_res.tg)) {
tg 601 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (pipe_ctx->stream_res.tg->funcs->set_drr)
tg 602 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_drr(
tg 603 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, &params);
tg 608 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (pipe_ctx->stream_res.tg->funcs->set_static_screen_control)
tg 609 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_static_screen_control(
tg 610 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, event_triggers);
tg 838 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_odm_combine(
tg 839 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg,
tg 843 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_odm_bypass(
tg 844 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing);
tg 902 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c stream_res->abm->funcs->set_pipe(stream_res->abm, stream_res->tg->inst + 1);
tg 1039 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->program_global_sync(
tg 1040 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg,
tg 1046 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_vtg_params(
tg 1047 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing);
tg 1073 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->lock_doublebuffer_enable(
tg 1074 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg);
tg 1075 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->lock(pipe->stream_res.tg);
tg 1077 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->unlock(pipe->stream_res.tg);
tg 1078 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->wait_for_state(pipe->stream_res.tg,
tg 1080 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->wait_for_state(pipe->stream_res.tg,
tg 1082 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->wait_for_state(pipe->stream_res.tg,
tg 1084 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->lock_doublebuffer_disable(
tg 1085 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg);
tg 1134 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->triplebuffer_lock(pipe->stream_res.tg);
tg 1136 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->triplebuffer_unlock(pipe->stream_res.tg);
tg 1139 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->lock(pipe->stream_res.tg);
tg 1141 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe->stream_res.tg->funcs->unlock(pipe->stream_res.tg);
tg 1153 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg;
tg 1177 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg = top_pipe_to_program->stream_res.tg;
tg 1204 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (old_pipe_ctx->stream_res.tg == tg &&
tg 1211 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg != old_pipe_ctx->stream_res.tg) &&
tg 1213 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c old_pipe_ctx->stream_res.tg == tg) {
tg 1236 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c !pipe_ctx->plane_state || !tg->funcs->is_tg_enabled(tg))
tg 1331 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->program_global_sync(
tg 1332 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg,
tg 1338 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_vtg_params(
tg 1339 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing);
tg 1614 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = pipe_ctx->stream_res.tg;
tg 1620 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (tg->funcs->setup_vertical_interrupt2)
tg 1621 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->setup_vertical_interrupt2(tg, start_line);
tg 1669 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->disable_crtc(pipe_ctx->stream_res.tg);
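The dcn20_hwseq.c entries at lines 1073-1085 show the global pipe-lock choreography: double-buffered register latching is switched on around the OTG lock, and the unlock path waits for CRTC state transitions before switching it back off. A sketch of that sequence; the exact VACTIVE/VBLANK ordering between unlock and lock_doublebuffer_disable is an assumption here, read off the three wait_for_state calls in the listing:

#include <stdbool.h>

struct timing_generator;

enum crtc_state { CRTC_STATE_VACTIVE, CRTC_STATE_VBLANK };

/* Hypothetical flat wrappers for the tg->funcs hooks in the listing. */
extern void lock_doublebuffer_enable(struct timing_generator *tg);
extern void lock_doublebuffer_disable(struct timing_generator *tg);
extern void tg_lock(struct timing_generator *tg);
extern void tg_unlock(struct timing_generator *tg);
extern void wait_for_state(struct timing_generator *tg, enum crtc_state s);

static void pipe_lock_global(struct timing_generator *tg, bool lock)
{
	if (lock) {
		lock_doublebuffer_enable(tg);
		tg_lock(tg);
	} else {
		tg_unlock(tg);
		/* Let the double-buffered values latch over a frame
		 * before turning the latch mode back off. */
		wait_for_state(tg, CRTC_STATE_VACTIVE);
		wait_for_state(tg, CRTC_STATE_VBLANK);
		wait_for_state(tg, CRTC_STATE_VACTIVE);
		lock_doublebuffer_disable(tg);
	}
}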
drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->disable_crtc(pipe_ctx->stream_res.tg); tg 1671 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->enable_optc_clock(pipe_ctx->stream_res.tg, false); tg 1672 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (pipe_ctx->stream_res.tg->funcs->set_odm_bypass) tg 1673 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_odm_bypass( tg 1674 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, &pipe_ctx->stream->timing); tg 1676 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (pipe_ctx->stream_res.tg->funcs->set_drr) tg 1677 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_drr( tg 1678 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, NULL); tg 1690 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->pipe_idx, pipe_ctx->stream_res.tg->inst); tg 1899 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (pipe_ctx->stream_res.tg->funcs->set_gsl != NULL && tg 1900 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_gsl_source_select != NULL) { tg 1901 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_gsl( tg 1902 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, tg 1905 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg->funcs->set_gsl_source_select( tg 1906 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg, group_idx, enable ? 4 : 0); tg 1931 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = pipe_ctx->stream_res.tg; tg 1958 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->set_early_control(tg, early_control); tg 2025 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = dc->res_pool->timing_generators[i]; tg 2027 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (tg->funcs->is_tg_enabled(tg)) tg 2028 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c dcn20_init_blank(dc, tg); tg 2032 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = dc->res_pool->timing_generators[i]; tg 2034 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (tg->funcs->is_tg_enabled(tg)) tg 2035 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->lock(tg); tg 2056 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = dc->res_pool->timing_generators[i]; tg 2061 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg = tg; tg 2087 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = dc->res_pool->timing_generators[i]; tg 2089 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c if (tg->funcs->is_tg_enabled(tg)) tg 2090 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->unlock(tg); tg 2098 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c pipe_ctx->stream_res.tg = NULL; tg 2103 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c struct timing_generator *tg = dc->res_pool->timing_generators[i]; tg 2105 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.c tg->funcs->tg_init(tg); tg 97 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hwseq.h struct timing_generator *tg); tg 1448 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c pixel_clk_params->controller_id = pipe_ctx->stream_res.tg->inst + 1; tg 1941 
tg 2537 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c if (dc->debug.force_odm_combine & (1 << pipe->stream_res.tg->inst)) {
tg 2950 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c idle_pipe->stream_res.tg = head_pipe->stream_res.tg;
tg 241 drivers/gpu/drm/amd/display/dc/inc/core_types.h struct timing_generator *tg;
tg 137 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*validate_timing)(struct timing_generator *tg,
tg 139 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*program_timing)(struct timing_generator *tg,
tg 159 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*enable_crtc)(struct timing_generator *tg);
tg 160 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*disable_crtc)(struct timing_generator *tg);
tg 161 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*is_counter_moving)(struct timing_generator *tg);
tg 162 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*get_position)(struct timing_generator *tg,
tg 165 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h uint32_t (*get_frame_count)(struct timing_generator *tg);
tg 167 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h struct timing_generator *tg,
tg 175 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*is_matching_timing)(struct timing_generator *tg,
tg 177 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_early_control)(struct timing_generator *tg,
tg 179 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*wait_for_state)(struct timing_generator *tg,
tg 181 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_blank)(struct timing_generator *tg,
tg 183 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*is_blanked)(struct timing_generator *tg);
tg 184 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_overscan_blank_color) (struct timing_generator *tg, const struct tg_color *color);
tg 185 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_blank_color)(struct timing_generator *tg, const struct tg_color *color);
tg 186 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_colors)(struct timing_generator *tg,
tg 190 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*disable_vga)(struct timing_generator *tg);
tg 191 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*did_triggered_reset_occur)(struct timing_generator *tg);
tg 192 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*setup_global_swap_lock)(struct timing_generator *tg,
tg 194 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*unlock)(struct timing_generator *tg);
tg 195 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*lock)(struct timing_generator *tg);
tg 196 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*lock_doublebuffer_disable)(struct timing_generator *tg);
tg 197 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*lock_doublebuffer_enable)(struct timing_generator *tg);
tg 199 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void(*triplebuffer_unlock)(struct timing_generator *tg);
tg 200 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void(*triplebuffer_lock)(struct timing_generator *tg);
tg 202 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*enable_reset_trigger)(struct timing_generator *tg,
tg 204 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*enable_crtc_reset)(struct timing_generator *tg,
tg 207 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*disable_reset_trigger)(struct timing_generator *tg);
tg 208 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*tear_down_global_swap_lock)(struct timing_generator *tg);
tg 209 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*enable_advanced_request)(struct timing_generator *tg,
tg 211 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_drr)(struct timing_generator *tg, const struct drr_params *params);
tg 212 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_static_screen_control)(struct timing_generator *tg,
tg 215 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h struct timing_generator *tg,
tg 219 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*arm_vert_intr)(struct timing_generator *tg, uint8_t width);
tg 221 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*program_global_sync)(struct timing_generator *tg,
tg 226 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*enable_optc_clock)(struct timing_generator *tg, bool enable);
tg 227 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*program_stereo)(struct timing_generator *tg,
tg 229 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*is_stereo_left_eye)(struct timing_generator *tg);
tg 231 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*set_blank_data_double_buffer)(struct timing_generator *tg, bool enable);
tg 233 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*tg_init)(struct timing_generator *tg);
tg 234 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*is_tg_enabled)(struct timing_generator *tg);
tg 235 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*is_optc_underflow_occurred)(struct timing_generator *tg);
tg 236 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h void (*clear_optc_underflow)(struct timing_generator *tg);
tg 252 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*configure_crc)(struct timing_generator *tg,
tg 259 drivers/gpu/drm/amd/display/dc/inc/hw/timing_generator.h bool (*get_crc)(struct timing_generator *tg,
tg 286 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h void (*init_blank)(struct dc *dc, struct timing_generator *tg);
tg 350 drivers/gpu/drm/amd/display/dc/inc/hw_sequencer.h struct timing_generator *tg);
tg 214 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c struct timing_generator *tg =
tg 215 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c core_dc->current_state->res_ctx.pipe_ctx[pipe_offset].stream_res.tg;
tg 218 drivers/gpu/drm/amd/display/dc/irq/dce110/irq_service_dce110.c if (!tg || !tg->funcs->arm_vert_intr(tg, 2)) {
tg 2248 drivers/gpu/drm/drm_connector.c struct drm_tile_group *tg = container_of(kref, struct drm_tile_group, refcount);
tg 2249 drivers/gpu/drm/drm_connector.c struct drm_device *dev = tg->dev;
tg 2251 drivers/gpu/drm/drm_connector.c idr_remove(&dev->mode_config.tile_idr, tg->id);
tg 2253 drivers/gpu/drm/drm_connector.c kfree(tg);
tg 2264 drivers/gpu/drm/drm_connector.c struct drm_tile_group *tg)
tg 2266 drivers/gpu/drm/drm_connector.c kref_put(&tg->refcount, drm_tile_group_free);
tg 2283 drivers/gpu/drm/drm_connector.c struct drm_tile_group *tg;
tg 2286 drivers/gpu/drm/drm_connector.c idr_for_each_entry(&dev->mode_config.tile_idr, tg, id) {
tg 2287 drivers/gpu/drm/drm_connector.c if (!memcmp(tg->group_data, topology, 8)) {
tg 2288 drivers/gpu/drm/drm_connector.c if (!kref_get_unless_zero(&tg->refcount))
tg 2289 drivers/gpu/drm/drm_connector.c tg = NULL;
tg 2291 drivers/gpu/drm/drm_connector.c return tg;
tg 2313 drivers/gpu/drm/drm_connector.c struct drm_tile_group *tg;
tg 2316 drivers/gpu/drm/drm_connector.c tg = kzalloc(sizeof(*tg), GFP_KERNEL);
tg 2317 drivers/gpu/drm/drm_connector.c if (!tg)
tg 2320 drivers/gpu/drm/drm_connector.c kref_init(&tg->refcount);
tg 2321 drivers/gpu/drm/drm_connector.c memcpy(tg->group_data, topology, 8);
tg 2322 drivers/gpu/drm/drm_connector.c tg->dev = dev;
tg 2325 drivers/gpu/drm/drm_connector.c ret = idr_alloc(&dev->mode_config.tile_idr, tg, 1, 0, GFP_KERNEL);
tg 2327 drivers/gpu/drm/drm_connector.c tg->id = ret;
tg 2329 drivers/gpu/drm/drm_connector.c kfree(tg);
tg 2330 drivers/gpu/drm/drm_connector.c tg = NULL;
tg 2334 drivers/gpu/drm/drm_connector.c return tg;
tg 5384 drivers/gpu/drm/drm_edid.c struct drm_tile_group *tg;
tg 5411 drivers/gpu/drm/drm_edid.c tg = drm_mode_get_tile_group(connector->dev, tile->topology_id);
tg 5412 drivers/gpu/drm/drm_edid.c if (!tg) {
tg 5413 drivers/gpu/drm/drm_edid.c tg = drm_mode_create_tile_group(connector->dev, tile->topology_id);
tg 5415 drivers/gpu/drm/drm_edid.c if (!tg)
tg 5418 drivers/gpu/drm/drm_edid.c if (connector->tile_group != tg) {
tg 5424 drivers/gpu/drm/drm_edid.c connector->tile_group = tg;
tg 5427 drivers/gpu/drm/drm_edid.c drm_mode_put_tile_group(connector->dev, tg);
tg 395 drivers/hwmon/nct6683.c const struct sensor_template_group *tg,
tg 409 drivers/hwmon/nct6683.c t = tg->templates;
tg 431 drivers/hwmon/nct6683.c group->is_visible = tg->is_visible;
tg 434 drivers/hwmon/nct6683.c t = tg->templates;
tg 437 drivers/hwmon/nct6683.c (*t)->dev_attr.attr.name, tg->base + i);
tg 1282 drivers/hwmon/nct6775.c const struct sensor_template_group *tg,
tg 1296 drivers/hwmon/nct6775.c t = tg->templates;
tg 1318 drivers/hwmon/nct6775.c group->is_visible = tg->is_visible;
tg 1321 drivers/hwmon/nct6775.c t = tg->templates;
tg 1324 drivers/hwmon/nct6775.c (*t)->dev_attr.attr.name, tg->base + i);
tg 623 drivers/media/platform/qcom/camss/camss-csid.c struct csid_testgen_config *tg = &csid->testgen;
tg 640 drivers/media/platform/qcom/camss/camss-csid.c if (!tg->enabled &&
tg 644 drivers/media/platform/qcom/camss/camss-csid.c if (tg->enabled) {
tg 675 drivers/media/platform/qcom/camss/camss-csid.c val = tg->payload_mode;
tg 736 drivers/media/platform/qcom/camss/camss-csid.c if (tg->enabled) {
tg 742 drivers/media/platform/qcom/camss/camss-csid.c if (tg->enabled) {
tg 1021 drivers/media/platform/qcom/camss/camss-csid.c struct csid_testgen_config *tg = &csid->testgen;
tg 1027 drivers/media/platform/qcom/camss/camss-csid.c tg->enabled = !!value;
tg 1031 drivers/media/platform/qcom/camss/camss-csid.c tg->payload_mode = CSID_PAYLOAD_MODE_INCREMENTING;
tg 1034 drivers/media/platform/qcom/camss/camss-csid.c tg->payload_mode = CSID_PAYLOAD_MODE_ALTERNATING_55_AA;
tg 1037 drivers/media/platform/qcom/camss/camss-csid.c tg->payload_mode = CSID_PAYLOAD_MODE_ALL_ZEROES;
tg 1040 drivers/media/platform/qcom/camss/camss-csid.c tg->payload_mode = CSID_PAYLOAD_MODE_ALL_ONES;
tg 1043 drivers/media/platform/qcom/camss/camss-csid.c tg->payload_mode = CSID_PAYLOAD_MODE_RANDOM;
tg 87 drivers/net/ethernet/mscc/ocelot_ace.c u32 tg; /* TG_DAT */
tg 141 drivers/net/ethernet/mscc/ocelot_ace.c ocelot_write(oc, data->tg, S2_CACHE_TG_DAT);
tg 153 drivers/net/ethernet/mscc/ocelot_ace.c data->tg = ocelot_read(oc, S2_CACHE_TG_DAT);
tg 348 drivers/net/ethernet/mscc/ocelot_ace.c data.tg = (data.tg & ~data.tg_mask);
tg 350 drivers/net/ethernet/mscc/ocelot_ace.c data.tg |= data.tg_value;
tg 177 drivers/nfc/pn533/pn533.c u8 tg;
tg 871 drivers/nfc/pn533/pn533.c static int pn533_target_found(struct pn533 *dev, u8 tg, u8 *tgdata,
tg 880 drivers/nfc/pn533/pn533.c if (tg != 1)
tg 971 drivers/nfc/pn533/pn533.c u8 nbtg, tg, *tgdata;
tg 979 drivers/nfc/pn533/pn533.c tg = resp->data[1];
tg 984 drivers/nfc/pn533/pn533.c rc = pn533_target_found(dev, tg, tgdata, tgdata_len);
tg 379 drivers/tty/serial/atmel_serial.c if (iso7816conf->tg > 255) {
tg 424 drivers/tty/serial/atmel_serial.c atmel_uart_writel(port, ATMEL_US_TTGR, iso7816conf->tg);
tg 2233 drivers/tty/serial/atmel_serial.c atmel_uart_writel(port, ATMEL_US_TTGR, port->iso7816.tg);
tg 236 drivers/video/fbdev/w100fb.c struct w100_tg_info *tg = par->mach->tg;
tg 245 drivers/video/fbdev/w100fb.c if(tg && tg->suspend)
tg 246 drivers/video/fbdev/w100fb.c tg->suspend(par);
tg 253 drivers/video/fbdev/w100fb.c if(tg && tg->resume)
tg 254 drivers/video/fbdev/w100fb.c tg->resume(par);
tg 414 drivers/video/fbdev/w100fb.c struct w100_tg_info *tg = par->mach->tg;
tg 430 drivers/video/fbdev/w100fb.c if (!par->blanked && tg && tg->change)
tg 431 drivers/video/fbdev/w100fb.c tg->change(par);
tg 608 drivers/video/fbdev/w100fb.c struct w100_tg_info *tg = par->mach->tg;
tg 611 drivers/video/fbdev/w100fb.c if(tg && tg->suspend)
tg 612 drivers/video/fbdev/w100fb.c tg->suspend(par);
tg 623 drivers/video/fbdev/w100fb.c struct w100_tg_info *tg = par->mach->tg;
tg 628 drivers/video/fbdev/w100fb.c if(tg && tg->resume)
tg 629 drivers/video/fbdev/w100fb.c tg->resume(par);
tg 1569 include/drm/drm_connector.h struct drm_tile_group *tg);
tg 145 include/uapi/linux/serial.h __u32 tg;
tg 117 include/video/w100fb.h struct w100_tg_info *tg;
tg 14 kernel/sched/autogroup.c autogroup_default.tg = &root_task_group;
tg 20 kernel/sched/autogroup.c void autogroup_free(struct task_group *tg)
tg 22 kernel/sched/autogroup.c kfree(tg->autogroup);
tg 31 kernel/sched/autogroup.c ag->tg->rt_se = NULL;
tg 32 kernel/sched/autogroup.c ag->tg->rt_rq = NULL;
tg 34 kernel/sched/autogroup.c sched_offline_group(ag->tg);
tg 35 kernel/sched/autogroup.c sched_destroy_group(ag->tg);
tg 66 kernel/sched/autogroup.c struct task_group *tg;
tg 71 kernel/sched/autogroup.c tg = sched_create_group(&root_task_group);
tg 72 kernel/sched/autogroup.c if (IS_ERR(tg))
tg 78 kernel/sched/autogroup.c ag->tg = tg;
tg 87 kernel/sched/autogroup.c free_rt_sched_group(tg);
tg 88 kernel/sched/autogroup.c tg->rt_se = root_task_group.rt_se;
tg 89 kernel/sched/autogroup.c tg->rt_rq = root_task_group.rt_rq;
tg 91 kernel/sched/autogroup.c tg->autogroup = ag;
tg 93 kernel/sched/autogroup.c sched_online_group(tg, &root_task_group);
tg 107 kernel/sched/autogroup.c bool task_wants_autogroup(struct task_struct *p, struct task_group *tg)
tg 109 kernel/sched/autogroup.c if (tg != &root_task_group)
tg 236 kernel/sched/autogroup.c err = sched_group_set_shares(ag->tg, shares);
tg 250 kernel/sched/autogroup.c if (!task_group_is_autogroup(ag->tg))
tg 262 kernel/sched/autogroup.c int autogroup_path(struct task_group *tg, char *buf, int buflen)
tg 264 kernel/sched/autogroup.c if (!task_group_is_autogroup(tg))
tg 267 kernel/sched/autogroup.c return snprintf(buf, buflen, "%s-%ld", "/autogroup", tg->autogroup->id);
tg 11 kernel/sched/autogroup.h struct task_group *tg;
tg 18 kernel/sched/autogroup.h extern void autogroup_free(struct task_group *tg);
tg 20 kernel/sched/autogroup.h static inline bool task_group_is_autogroup(struct task_group *tg)
tg 22 kernel/sched/autogroup.h return !!tg->autogroup;
tg 25 kernel/sched/autogroup.h extern bool task_wants_autogroup(struct task_struct *p, struct task_group *tg);
tg 28 kernel/sched/autogroup.h autogroup_task_group(struct task_struct *p, struct task_group *tg)
tg 32 kernel/sched/autogroup.h if (enabled && task_wants_autogroup(p, tg))
tg 33 kernel/sched/autogroup.h return p->signal->autogroup->tg;
tg 35 kernel/sched/autogroup.h return tg;
tg 38 kernel/sched/autogroup.h extern int autogroup_path(struct task_group *tg, char *buf, int buflen);
tg 43 kernel/sched/autogroup.h static inline void autogroup_free(struct task_group *tg) { }
tg 44 kernel/sched/autogroup.h static inline bool task_group_is_autogroup(struct task_group *tg)
tg 50 kernel/sched/autogroup.h autogroup_task_group(struct task_struct *p, struct task_group *tg)
tg 52 kernel/sched/autogroup.h return tg;
tg 55 kernel/sched/autogroup.h static inline int autogroup_path(struct task_group *tg, char *buf, int buflen)
tg 741 kernel/sched/core.c int tg_nop(struct task_group *tg, void *data)
tg 1098 kernel/sched/core.c struct task_group *tg = &root_task_group;
tg 1100 kernel/sched/core.c uclamp_se_set(&tg->uclamp_req[UCLAMP_MIN],
tg 1102 kernel/sched/core.c uclamp_se_set(&tg->uclamp_req[UCLAMP_MAX],
tg 6914 kernel/sched/core.c static inline void alloc_uclamp_sched_group(struct task_group *tg,
tg 6921 kernel/sched/core.c uclamp_se_set(&tg->uclamp_req[clamp_id],
tg 6923 kernel/sched/core.c tg->uclamp[clamp_id] = parent->uclamp[clamp_id];
tg 6928 kernel/sched/core.c static void sched_free_group(struct task_group *tg)
tg 6930 kernel/sched/core.c free_fair_sched_group(tg);
tg 6931 kernel/sched/core.c free_rt_sched_group(tg);
tg 6932 kernel/sched/core.c autogroup_free(tg);
tg 6933 kernel/sched/core.c kmem_cache_free(task_group_cache, tg);
tg 6939 kernel/sched/core.c struct task_group *tg;
tg 6941 kernel/sched/core.c tg = kmem_cache_alloc(task_group_cache, GFP_KERNEL | __GFP_ZERO);
tg 6942 kernel/sched/core.c if (!tg)
tg 6945 kernel/sched/core.c if (!alloc_fair_sched_group(tg, parent))
tg 6948 kernel/sched/core.c if (!alloc_rt_sched_group(tg, parent))
tg 6951 kernel/sched/core.c alloc_uclamp_sched_group(tg, parent);
tg 6953 kernel/sched/core.c return tg;
tg 6956 kernel/sched/core.c sched_free_group(tg);
tg 6960 kernel/sched/core.c void sched_online_group(struct task_group *tg, struct task_group *parent)
tg 6965 kernel/sched/core.c list_add_rcu(&tg->list, &task_groups);
tg 6970 kernel/sched/core.c tg->parent = parent;
tg 6971 kernel/sched/core.c INIT_LIST_HEAD(&tg->children);
tg 6972 kernel/sched/core.c list_add_rcu(&tg->siblings, &parent->children);
tg 6975 kernel/sched/core.c online_fair_sched_group(tg);
tg 6985 kernel/sched/core.c void sched_destroy_group(struct task_group *tg)
tg 6988 kernel/sched/core.c call_rcu(&tg->rcu, sched_free_group_rcu);
tg 6991 kernel/sched/core.c void sched_offline_group(struct task_group *tg)
tg 6996 kernel/sched/core.c unregister_fair_sched_group(tg);
tg 6999 kernel/sched/core.c list_del_rcu(&tg->list);
tg 7000 kernel/sched/core.c list_del_rcu(&tg->siblings);
tg 7006 kernel/sched/core.c struct task_group *tg;
tg 7013 kernel/sched/core.c tg = container_of(task_css_check(tsk, cpu_cgrp_id, true),
tg 7015 kernel/sched/core.c tg = autogroup_task_group(tsk, tg);
tg 7016 kernel/sched/core.c tsk->sched_task_group = tg;
tg 7077 kernel/sched/core.c struct task_group *tg;
tg 7084 kernel/sched/core.c tg = sched_create_group(parent);
tg 7085 kernel/sched/core.c if (IS_ERR(tg))
tg 7088 kernel/sched/core.c return &tg->css;
tg 7094 kernel/sched/core.c struct task_group *tg = css_tg(css);
tg 7098 kernel/sched/core.c sched_online_group(tg, parent);
tg 7110 kernel/sched/core.c struct task_group *tg = css_tg(css);
tg 7112 kernel/sched/core.c sched_offline_group(tg);
tg 7117 kernel/sched/core.c struct task_group *tg = css_tg(css);
tg 7122 kernel/sched/core.c sched_free_group(tg);
tg 7276 kernel/sched/core.c struct task_group *tg;
tg 7285 kernel/sched/core.c tg = css_tg(of_css(of));
tg 7286 kernel/sched/core.c if (tg->uclamp_req[clamp_id].value != req.util)
tg 7287 kernel/sched/core.c uclamp_se_set(&tg->uclamp_req[clamp_id], req.util, false);
tg 7293 kernel/sched/core.c tg->uclamp_pct[clamp_id] = req.percent;
tg 7321 kernel/sched/core.c struct task_group *tg;
tg 7327 kernel/sched/core.c tg = css_tg(seq_css(sf));
tg 7328 kernel/sched/core.c util_clamp = tg->uclamp_req[clamp_id].value;
tg 7336 kernel/sched/core.c percent = tg->uclamp_pct[clamp_id];
tg 7366 kernel/sched/core.c struct task_group *tg = css_tg(css);
tg 7368 kernel/sched/core.c return (u64) scale_load_down(tg->shares);
tg 7377 kernel/sched/core.c static int __cfs_schedulable(struct task_group *tg, u64 period, u64 runtime);
tg 7379 kernel/sched/core.c static int tg_set_cfs_bandwidth(struct task_group *tg, u64 period, u64 quota)
tg 7382 kernel/sched/core.c struct cfs_bandwidth *cfs_b = &tg->cfs_bandwidth;
tg 7384 kernel/sched/core.c if (tg == &root_task_group)
tg 7409 kernel/sched/core.c ret = __cfs_schedulable(tg, period, quota);
tg 7434 kernel/sched/core.c struct cfs_rq *cfs_rq = tg->cfs_rq[i];
tg 7455 kernel/sched/core.c static int tg_set_cfs_quota(struct task_group *tg, long cfs_quota_us)
tg 7459 kernel/sched/core.c period = ktime_to_ns(tg->cfs_bandwidth.period);
tg 7467 kernel/sched/core.c return tg_set_cfs_bandwidth(tg, period, quota);
tg 7470 kernel/sched/core.c static long tg_get_cfs_quota(struct task_group *tg)
tg 7474 kernel/sched/core.c if (tg->cfs_bandwidth.quota == RUNTIME_INF)
tg 7477 kernel/sched/core.c quota_us = tg->cfs_bandwidth.quota;
tg 7483 kernel/sched/core.c static int tg_set_cfs_period(struct task_group *tg, long cfs_period_us)
tg 7491 kernel/sched/core.c quota = tg->cfs_bandwidth.quota;
tg 7493 kernel/sched/core.c return tg_set_cfs_bandwidth(tg, period, quota);
tg 7496 kernel/sched/core.c static long tg_get_cfs_period(struct task_group *tg)
tg 7500 kernel/sched/core.c cfs_period_us = ktime_to_ns(tg->cfs_bandwidth.period);
tg 7531 kernel/sched/core.c struct task_group *tg;
tg 7539 kernel/sched/core.c static u64 normalize_cfs_quota(struct task_group *tg,
tg 7544 kernel/sched/core.c if (tg == d->tg) {
tg 7548 kernel/sched/core.c period = tg_get_cfs_period(tg);
tg 7549 kernel/sched/core.c quota = tg_get_cfs_quota(tg);
tg 7559 kernel/sched/core.c static int tg_cfs_schedulable_down(struct task_group *tg, void *data)
tg 7562 kernel/sched/core.c struct cfs_bandwidth *cfs_b = &tg->cfs_bandwidth;
tg 7565 kernel/sched/core.c if (!tg->parent) {
tg 7568 kernel/sched/core.c struct cfs_bandwidth *parent_b = &tg->parent->cfs_bandwidth;
tg 7570 kernel/sched/core.c quota = normalize_cfs_quota(tg, d);
tg 7592 kernel/sched/core.c static int __cfs_schedulable(struct task_group *tg, u64 period, u64 quota)
tg 7596 kernel/sched/core.c .tg = tg,
tg 7615 kernel/sched/core.c struct task_group *tg = css_tg(seq_css(sf));
tg 7616 kernel/sched/core.c struct cfs_bandwidth *cfs_b = &tg->cfs_bandwidth;
tg 7622 kernel/sched/core.c if (schedstat_enabled() && tg != &root_task_group) {
tg 7627 kernel/sched/core.c ws += schedstat_val(tg->se[i]->statistics.wait_sum);
tg 7721 kernel/sched/core.c struct task_group *tg = css_tg(css);
tg 7722 kernel/sched/core.c struct cfs_bandwidth *cfs_b = &tg->cfs_bandwidth;
tg 7742 kernel/sched/core.c struct task_group *tg = css_tg(css);
tg 7743 kernel/sched/core.c u64 weight = scale_load_down(tg->shares);
tg 7836 kernel/sched/core.c struct task_group *tg = css_tg(seq_css(sf));
tg 7838 kernel/sched/core.c cpu_period_quota_print(sf, tg_get_cfs_period(tg), tg_get_cfs_quota(tg));
tg 7845 kernel/sched/core.c struct task_group *tg = css_tg(of_css(of));
tg 7846 kernel/sched/core.c u64 period = tg_get_cfs_period(tg);
tg 7852 kernel/sched/core.c ret = tg_set_cfs_bandwidth(tg, period, quota);
tg 375 kernel/sched/debug.c static void print_cfs_group_stats(struct seq_file *m, int cpu, struct task_group *tg)
tg 377 kernel/sched/debug.c struct sched_entity *se = tg->se[cpu];
tg 422 kernel/sched/debug.c static char *task_group_path(struct task_group *tg)
tg 424 kernel/sched/debug.c if (autogroup_path(tg, group_path, PATH_MAX))
tg 427 kernel/sched/debug.c cgroup_path(tg->css.cgroup, group_path, PATH_MAX);
tg 493 kernel/sched/debug.c SEQ_printf(m, "cfs_rq[%d]:%s\n", cpu, task_group_path(cfs_rq->tg));
tg 546 kernel/sched/debug.c atomic_long_read(&cfs_rq->tg->load_avg));
tg 557 kernel/sched/debug.c print_cfs_group_stats(m, cpu, cfs_rq->tg);
tg 565 kernel/sched/debug.c SEQ_printf(m, "rt_rq[%d]:%s\n", cpu, task_group_path(rt_rq->tg));
tg 283 kernel/sched/fair.c if (cfs_rq && task_group_is_autogroup(cfs_rq->tg))
tg 284 kernel/sched/fair.c autogroup_path(cfs_rq->tg, path, len);
tg 285 kernel/sched/fair.c else if (cfs_rq && cfs_rq->tg->css.cgroup)
tg 286 kernel/sched/fair.c cgroup_path(cfs_rq->tg->css.cgroup, path, len);
tg 310 kernel/sched/fair.c if (cfs_rq->tg->parent &&
tg 311 kernel/sched/fair.c cfs_rq->tg->parent->cfs_rq[cpu]->on_list) {
tg 319 kernel/sched/fair.c &(cfs_rq->tg->parent->cfs_rq[cpu]->leaf_cfs_rq_list));
tg 329 kernel/sched/fair.c if (!cfs_rq->tg->parent) {
tg 2997 kernel/sched/fair.c struct task_group *tg = cfs_rq->tg;
tg 2999 kernel/sched/fair.c tg_shares = READ_ONCE(tg->shares);
tg 3003 kernel/sched/fair.c tg_weight = atomic_long_read(&tg->load_avg);
tg 3091 kernel/sched/fair.c runnable = shares = READ_ONCE(gcfs_rq->tg->shares);
tg 3156 kernel/sched/fair.c if (cfs_rq->tg == &root_task_group)
tg 3160 kernel/sched/fair.c atomic_long_add(delta, &cfs_rq->tg->load_avg);
tg 4376 kernel/sched/fair.c static inline struct cfs_bandwidth *tg_cfs_bandwidth(struct task_group *tg)
tg 4378 kernel/sched/fair.c return &tg->cfs_bandwidth;
tg 4384 kernel/sched/fair.c struct task_group *tg = cfs_rq->tg;
tg 4385 kernel/sched/fair.c struct cfs_bandwidth *cfs_b = tg_cfs_bandwidth(tg);
tg 4453 kernel/sched/fair.c static inline int throttled_lb_pair(struct task_group *tg,
tg 4458 kernel/sched/fair.c src_cfs_rq = tg->cfs_rq[src_cpu];
tg 4459 kernel/sched/fair.c dest_cfs_rq = tg->cfs_rq[dest_cpu];
tg 4465 kernel/sched/fair.c static int tg_unthrottle_up(struct task_group *tg, void *data)
tg 4468 kernel/sched/fair.c struct cfs_rq *cfs_rq = tg->cfs_rq[cpu_of(rq)];
tg 4483 kernel/sched/fair.c static int tg_throttle_down(struct task_group *tg, void *data)
tg 4486 kernel/sched/fair.c struct cfs_rq *cfs_rq = tg->cfs_rq[cpu_of(rq)];
tg 4501 kernel/sched/fair.c struct cfs_bandwidth *cfs_b = tg_cfs_bandwidth(cfs_rq->tg);
tg 4506 kernel/sched/fair.c se = cfs_rq->tg->se[cpu_of(rq_of(cfs_rq))];
tg 4510 kernel/sched/fair.c walk_tg_tree_from(cfs_rq->tg, tg_throttle_down, tg_nop, (void *)rq);
tg 4561 kernel/sched/fair.c struct cfs_bandwidth *cfs_b = tg_cfs_bandwidth(cfs_rq->tg);
tg 4566 kernel/sched/fair.c se = cfs_rq->tg->se[cpu_of(rq)];
tg 4578 kernel/sched/fair.c walk_tg_tree_from(cfs_rq->tg, tg_nop, tg_unthrottle_up, (void *)rq);
tg 4784 kernel/sched/fair.c struct cfs_bandwidth *cfs_b = tg_cfs_bandwidth(cfs_rq->tg);
tg 4881 kernel/sched/fair.c static void sync_throttle(struct task_group *tg, int cpu)
tg 4888 kernel/sched/fair.c if (!tg->parent)
tg 4891 kernel/sched/fair.c cfs_rq = tg->cfs_rq[cpu];
tg 4892 kernel/sched/fair.c pcfs_rq = tg->parent->cfs_rq[cpu];
tg 5038 kernel/sched/fair.c struct task_group *tg;
tg 5043 kernel/sched/fair.c list_for_each_entry_rcu(tg, &task_groups, list) {
tg 5044 kernel/sched/fair.c struct cfs_bandwidth *cfs_b = &tg->cfs_bandwidth;
tg 5045 kernel/sched/fair.c struct cfs_rq *cfs_rq = tg->cfs_rq[cpu_of(rq)];
tg 5057 kernel/sched/fair.c struct task_group *tg;
tg 5062 kernel/sched/fair.c list_for_each_entry_rcu(tg, &task_groups, list) {
tg 5063 kernel/sched/fair.c struct cfs_rq *cfs_rq = tg->cfs_rq[cpu_of(rq)];
tg 5095 kernel/sched/fair.c static inline void sync_throttle(struct task_group *tg, int cpu) {}
tg 5108 kernel/sched/fair.c static inline int throttled_lb_pair(struct task_group *tg,
tg 5120 kernel/sched/fair.c static inline struct cfs_bandwidth *tg_cfs_bandwidth(struct task_group *tg)
tg 7608 kernel/sched/fair.c se = cfs_rq->tg->se[cpu];
tg 7635 kernel/sched/fair.c struct sched_entity *se = cfs_rq->tg->se[cpu_of(rq)];
tg 10261 kernel/sched/fair.c void free_fair_sched_group(struct task_group *tg)
tg 10265 kernel/sched/fair.c destroy_cfs_bandwidth(tg_cfs_bandwidth(tg));
tg 10268 kernel/sched/fair.c if (tg->cfs_rq)
tg 10269 kernel/sched/fair.c kfree(tg->cfs_rq[i]);
tg 10270 kernel/sched/fair.c if (tg->se)
tg 10271 kernel/sched/fair.c kfree(tg->se[i]);
tg 10274 kernel/sched/fair.c kfree(tg->cfs_rq);
tg 10275 kernel/sched/fair.c kfree(tg->se);
tg 10278 kernel/sched/fair.c int alloc_fair_sched_group(struct task_group *tg, struct task_group *parent)
tg 10284 kernel/sched/fair.c tg->cfs_rq = kcalloc(nr_cpu_ids, sizeof(cfs_rq), GFP_KERNEL);
tg 10285 kernel/sched/fair.c if (!tg->cfs_rq)
tg 10287 kernel/sched/fair.c tg->se = kcalloc(nr_cpu_ids, sizeof(se), GFP_KERNEL);
tg 10288 kernel/sched/fair.c if (!tg->se)
tg 10291 kernel/sched/fair.c tg->shares = NICE_0_LOAD;
tg 10293 kernel/sched/fair.c init_cfs_bandwidth(tg_cfs_bandwidth(tg));
tg 10307 kernel/sched/fair.c init_tg_cfs_entry(tg, cfs_rq, se, i, parent->se[i]);
tg 10319 kernel/sched/fair.c void online_fair_sched_group(struct task_group *tg)
tg 10328 kernel/sched/fair.c se = tg->se[i];
tg 10332 kernel/sched/fair.c sync_throttle(tg, i);
tg 10337 kernel/sched/fair.c void unregister_fair_sched_group(struct task_group *tg)
tg 10344 kernel/sched/fair.c if (tg->se[cpu])
tg 10345 kernel/sched/fair.c remove_entity_load_avg(tg->se[cpu]);
tg 10351 kernel/sched/fair.c if (!tg->cfs_rq[cpu]->on_list)
tg 10357 kernel/sched/fair.c list_del_leaf_cfs_rq(tg->cfs_rq[cpu]);
tg 10362 kernel/sched/fair.c void init_tg_cfs_entry(struct task_group *tg, struct cfs_rq *cfs_rq,
tg 10368 kernel/sched/fair.c cfs_rq->tg = tg;
tg 10372 kernel/sched/fair.c tg->cfs_rq[cpu] = cfs_rq;
tg 10373 kernel/sched/fair.c tg->se[cpu] = se;
tg 10395 kernel/sched/fair.c int sched_group_set_shares(struct task_group *tg, unsigned long shares)
tg 10402 kernel/sched/fair.c if (!tg->se[0])
tg 10408 kernel/sched/fair.c if (tg->shares == shares)
tg 10411 kernel/sched/fair.c tg->shares = shares;
tg 10414 kernel/sched/fair.c struct sched_entity *se = tg->se[i];
tg 10433 kernel/sched/fair.c void free_fair_sched_group(struct task_group *tg) { }
tg 10435 kernel/sched/fair.c int alloc_fair_sched_group(struct task_group *tg, struct task_group *parent)
tg 10440 kernel/sched/fair.c void online_fair_sched_group(struct task_group *tg) { }
tg 10442 kernel/sched/fair.c void unregister_fair_sched_group(struct task_group *tg) { }
tg 138 kernel/sched/rt.c void free_rt_sched_group(struct task_group *tg)
tg 142 kernel/sched/rt.c if (tg->rt_se)
tg 143 kernel/sched/rt.c destroy_rt_bandwidth(&tg->rt_bandwidth);
tg 146 kernel/sched/rt.c if (tg->rt_rq)
tg 147 kernel/sched/rt.c kfree(tg->rt_rq[i]);
tg 148 kernel/sched/rt.c if (tg->rt_se)
tg 149 kernel/sched/rt.c kfree(tg->rt_se[i]);
tg 152 kernel/sched/rt.c kfree(tg->rt_rq);
tg 153 kernel/sched/rt.c kfree(tg->rt_se);
tg 156 kernel/sched/rt.c void init_tg_rt_entry(struct task_group *tg, struct rt_rq *rt_rq,
tg 165 kernel/sched/rt.c rt_rq->tg = tg;
tg 167 kernel/sched/rt.c tg->rt_rq[cpu] = rt_rq;
tg 168 kernel/sched/rt.c tg->rt_se[cpu] = rt_se;
tg 183 kernel/sched/rt.c int alloc_rt_sched_group(struct task_group *tg, struct task_group *parent)
tg 189 kernel/sched/rt.c tg->rt_rq = kcalloc(nr_cpu_ids, sizeof(rt_rq), GFP_KERNEL);
tg 190 kernel/sched/rt.c if (!tg->rt_rq)
tg 192 kernel/sched/rt.c tg->rt_se = kcalloc(nr_cpu_ids, sizeof(rt_se), GFP_KERNEL);
tg 193 kernel/sched/rt.c if (!tg->rt_se)
tg 196 kernel/sched/rt.c init_rt_bandwidth(&tg->rt_bandwidth,
tg 211 kernel/sched/rt.c rt_rq->rt_runtime = tg->rt_bandwidth.rt_runtime;
tg 212 kernel/sched/rt.c init_tg_rt_entry(tg, rt_rq, rt_se, i, parent->rt_se[i]);
tg 251 kernel/sched/rt.c void free_rt_sched_group(struct task_group *tg) { }
tg 253 kernel/sched/rt.c int alloc_rt_sched_group(struct task_group *tg, struct task_group *parent)
tg 444 kernel/sched/rt.c if (!rt_rq->tg)
tg 452 kernel/sched/rt.c return ktime_to_ns(rt_rq->tg->rt_bandwidth.rt_period);
tg 457 kernel/sched/rt.c static inline struct task_group *next_task_group(struct task_group *tg)
tg 460 kernel/sched/rt.c tg = list_entry_rcu(tg->list.next,
tg 462 kernel/sched/rt.c } while (&tg->list != &task_groups && task_group_is_autogroup(tg));
tg 464 kernel/sched/rt.c if (&tg->list == &task_groups)
tg 465 kernel/sched/rt.c tg = NULL;
tg 467 kernel/sched/rt.c return tg;
tg 494 kernel/sched/rt.c rt_se = rt_rq->tg->rt_se[cpu];
tg 512 kernel/sched/rt.c rt_se = rt_rq->tg->rt_se[cpu];
tg 560 kernel/sched/rt.c return &rt_rq->tg->rt_bandwidth;
tg 1130 kernel/sched/rt.c if (rt_rq->tg)
tg 1131 kernel/sched/rt.c start_rt_bandwidth(&rt_rq->tg->rt_bandwidth);
tg 2403 kernel/sched/rt.c static inline int tg_has_rt_tasks(struct task_group *tg)
tg 2410 kernel/sched/rt.c if (task_group_is_autogroup(tg))
tg 2414 kernel/sched/rt.c if (rt_task(p) && task_group(p) == tg)
tg 2422 kernel/sched/rt.c struct task_group *tg;
tg 2427 kernel/sched/rt.c static int tg_rt_schedulable(struct task_group *tg, void *data)
tg 2434 kernel/sched/rt.c period = ktime_to_ns(tg->rt_bandwidth.rt_period);
tg 2435 kernel/sched/rt.c runtime = tg->rt_bandwidth.rt_runtime;
tg 2437 kernel/sched/rt.c if (tg == d->tg) {
tg 2451 kernel/sched/rt.c if (rt_bandwidth_enabled() && !runtime && tg_has_rt_tasks(tg))
tg 2465 kernel/sched/rt.c list_for_each_entry_rcu(child, &tg->children, siblings) {
tg 2469 kernel/sched/rt.c if (child == d->tg) {
tg 2483 kernel/sched/rt.c static int __rt_schedulable(struct task_group *tg, u64 period, u64 runtime)
tg 2488 kernel/sched/rt.c .tg = tg,
tg 2500 kernel/sched/rt.c static int tg_set_rt_bandwidth(struct task_group *tg,
tg 2509 kernel/sched/rt.c if (tg == &root_task_group && rt_runtime == 0)
tg 2518 kernel/sched/rt.c err = __rt_schedulable(tg, rt_period, rt_runtime);
tg 2522 kernel/sched/rt.c raw_spin_lock_irq(&tg->rt_bandwidth.rt_runtime_lock);
tg 2523 kernel/sched/rt.c tg->rt_bandwidth.rt_period = ns_to_ktime(rt_period);
tg 2524 kernel/sched/rt.c tg->rt_bandwidth.rt_runtime = rt_runtime;
tg 2527 kernel/sched/rt.c struct rt_rq *rt_rq = tg->rt_rq[i];
tg 2533 kernel/sched/rt.c raw_spin_unlock_irq(&tg->rt_bandwidth.rt_runtime_lock);
tg 2541 kernel/sched/rt.c int sched_group_set_rt_runtime(struct task_group *tg, long rt_runtime_us)
tg 2545 kernel/sched/rt.c rt_period = ktime_to_ns(tg->rt_bandwidth.rt_period);
tg 2552 kernel/sched/rt.c return tg_set_rt_bandwidth(tg, rt_period, rt_runtime);
tg 2555 kernel/sched/rt.c long sched_group_rt_runtime(struct task_group *tg)
tg 2559 kernel/sched/rt.c if (tg->rt_bandwidth.rt_runtime == RUNTIME_INF)
tg 2562 kernel/sched/rt.c rt_runtime_us = tg->rt_bandwidth.rt_runtime;
tg 2567 kernel/sched/rt.c int sched_group_set_rt_period(struct task_group *tg, u64 rt_period_us)
tg 2575 kernel/sched/rt.c rt_runtime = tg->rt_bandwidth.rt_runtime;
tg 2577 kernel/sched/rt.c return tg_set_rt_bandwidth(tg, rt_period, rt_runtime);
tg 2580 kernel/sched/rt.c long sched_group_rt_period(struct task_group *tg)
tg 2584 kernel/sched/rt.c rt_period_us = ktime_to_ns(tg->rt_bandwidth.rt_period);
tg 2602 kernel/sched/rt.c int sched_rt_can_attach(struct task_group *tg, struct task_struct *tsk)
tg 2605 kernel/sched/rt.c if (rt_task(tsk) && tg->rt_bandwidth.rt_runtime == 0)
tg 443 kernel/sched/sched.h extern int tg_nop(struct task_group *tg, void *data);
tg 445 kernel/sched/sched.h extern void free_fair_sched_group(struct task_group *tg);
tg 446 kernel/sched/sched.h extern int alloc_fair_sched_group(struct task_group *tg, struct task_group *parent);
tg 447 kernel/sched/sched.h extern void online_fair_sched_group(struct task_group *tg);
tg 448 kernel/sched/sched.h extern void unregister_fair_sched_group(struct task_group *tg);
tg 449 kernel/sched/sched.h extern void init_tg_cfs_entry(struct task_group *tg, struct cfs_rq *cfs_rq,
tg 458 kernel/sched/sched.h extern void free_rt_sched_group(struct task_group *tg);
tg 459 kernel/sched/sched.h extern int alloc_rt_sched_group(struct task_group *tg, struct task_group *parent);
tg 460 kernel/sched/sched.h extern void init_tg_rt_entry(struct task_group *tg, struct rt_rq *rt_rq,
tg 463 kernel/sched/sched.h extern int sched_group_set_rt_runtime(struct task_group *tg, long rt_runtime_us);
tg 464 kernel/sched/sched.h extern int sched_group_set_rt_period(struct task_group *tg, u64 rt_period_us);
tg 465 kernel/sched/sched.h extern long sched_group_rt_runtime(struct task_group *tg);
tg 466 kernel/sched/sched.h extern long sched_group_rt_period(struct task_group *tg);
tg 467 kernel/sched/sched.h extern int sched_rt_can_attach(struct task_group *tg, struct task_struct *tsk);
tg 470 kernel/sched/sched.h extern void sched_online_group(struct task_group *tg,
tg 472 kernel/sched/sched.h extern void sched_destroy_group(struct task_group *tg);
tg 473 kernel/sched/sched.h extern void sched_offline_group(struct task_group *tg);
tg 478 kernel/sched/sched.h extern int sched_group_set_shares(struct task_group *tg, unsigned long shares);
tg 570 kernel/sched/sched.h struct task_group *tg; /* group that "owns" this runqueue */
tg 628 kernel/sched/sched.h struct task_group *tg;
tg 1505 kernel/sched/sched.h struct task_group *tg = task_group(p);
tg 1509 kernel/sched/sched.h set_task_rq_fair(&p->se, p->se.cfs_rq, tg->cfs_rq[cpu]);
tg 1510 kernel/sched/sched.h p->se.cfs_rq = tg->cfs_rq[cpu];
tg 1511 kernel/sched/sched.h p->se.parent = tg->se[cpu];
tg 1515 kernel/sched/sched.h p->rt.rt_rq = tg->rt_rq[cpu];
tg 1516 kernel/sched/sched.h p->rt.parent = tg->rt_se[cpu];
tg 822 net/nfc/core.c struct nfc_target *tg;
tg 830 net/nfc/core.c tg = &dev->targets[i];
tg 831 net/nfc/core.c if (tg->idx == target_idx)