Lines matching refs: cl — references to struct hfsc_class *cl in net/sched/sch_hfsc.c (the Linux HFSC packet scheduler). Each entry gives the source line number, the matched statement, and the enclosing function; only lines that mention cl appear, so function bodies are shown partially.

194 eltree_insert(struct hfsc_class *cl) in eltree_insert() argument
196 struct rb_node **p = &cl->sched->eligible.rb_node; in eltree_insert()
203 if (cl->cl_e >= cl1->cl_e) in eltree_insert()
208 rb_link_node(&cl->el_node, parent, p); in eltree_insert()
209 rb_insert_color(&cl->el_node, &cl->sched->eligible); in eltree_insert()
213 eltree_remove(struct hfsc_class *cl) in eltree_remove() argument
215 rb_erase(&cl->el_node, &cl->sched->eligible); in eltree_remove()
219 eltree_update(struct hfsc_class *cl) in eltree_update() argument
221 eltree_remove(cl); in eltree_update()
222 eltree_insert(cl); in eltree_update()
229 struct hfsc_class *p, *cl = NULL; in eltree_get_mindl() local
236 if (cl == NULL || p->cl_d < cl->cl_d) in eltree_get_mindl()
237 cl = p; in eltree_get_mindl()
239 return cl; in eltree_get_mindl()
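
The search shows only the statements that mention cl, so the tree walk in between is elided. A minimal sketch of how the full eltree_insert body likely reads, assuming the standard kernel red-black-tree insertion idiom (rb_entry walk, then rb_link_node/rb_insert_color) that the matched lines imply; ties on cl_e go right so earlier-inserted classes stay leftmost:

static void
eltree_insert(struct hfsc_class *cl)
{
	struct rb_node **p = &cl->sched->eligible.rb_node;
	struct rb_node *parent = NULL;
	struct hfsc_class *cl1;

	/* descend to the insertion point, ordered by eligible time cl_e */
	while (*p != NULL) {
		parent = *p;
		cl1 = rb_entry(parent, struct hfsc_class, el_node);
		if (cl->cl_e >= cl1->cl_e)
			p = &parent->rb_right;
		else
			p = &parent->rb_left;
	}
	rb_link_node(&cl->el_node, parent, p);
	rb_insert_color(&cl->el_node, &cl->sched->eligible);
}

eltree_get_mindl then scans in order with rb_first/rb_next, stops at the first class whose eligible time lies in the future, and returns the eligible class with the smallest deadline cl_d, which is what its matched lines show.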
259 vttree_insert(struct hfsc_class *cl) in vttree_insert() argument
261 struct rb_node **p = &cl->cl_parent->vt_tree.rb_node; in vttree_insert()
268 if (cl->cl_vt >= cl1->cl_vt) in vttree_insert()
273 rb_link_node(&cl->vt_node, parent, p); in vttree_insert()
274 rb_insert_color(&cl->vt_node, &cl->cl_parent->vt_tree); in vttree_insert()
278 vttree_remove(struct hfsc_class *cl) in vttree_remove() argument
280 rb_erase(&cl->vt_node, &cl->cl_parent->vt_tree); in vttree_remove()
284 vttree_update(struct hfsc_class *cl) in vttree_update() argument
286 vttree_remove(cl); in vttree_update()
287 vttree_insert(cl); in vttree_update()
291 vttree_firstfit(struct hfsc_class *cl, u64 cur_time) in vttree_firstfit() argument
296 for (n = rb_first(&cl->vt_tree); n != NULL; n = rb_next(n)) { in vttree_firstfit()
308 vttree_get_minvt(struct hfsc_class *cl, u64 cur_time) in vttree_get_minvt() argument
311 if (cl->cl_cfmin > cur_time) in vttree_get_minvt()
314 while (cl->level > 0) { in vttree_get_minvt()
315 cl = vttree_firstfit(cl, cur_time); in vttree_get_minvt()
316 if (cl == NULL) in vttree_get_minvt()
321 if (cl->cl_parent->cl_cvtmin < cl->cl_vt) in vttree_get_minvt()
322 cl->cl_parent->cl_cvtmin = cl->cl_vt; in vttree_get_minvt()
324 return cl; in vttree_get_minvt()
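
Most of vttree_firstfit's loop body is elided because its cursor variable is p, not cl. A sketch under the assumption that it returns the first in-order class whose fit time cl_f has already passed, which is what vttree_get_minvt's level-by-level descent requires:

static struct hfsc_class *
vttree_firstfit(struct hfsc_class *cl, u64 cur_time)
{
	struct hfsc_class *p;
	struct rb_node *n;

	/* in-order scan: entries are sorted by virtual time cl_vt */
	for (n = rb_first(&cl->vt_tree); n != NULL; n = rb_next(n)) {
		p = rb_entry(n, struct hfsc_class, vt_node);
		if (p->cl_f <= cur_time)
			return p;	/* fit time reached: usable for link-sharing */
	}
	return NULL;
}

vttree_get_minvt repeats this descent until it reaches a leaf, raising each parent's cl_cvtmin to the chosen child's cl_vt on the way down. The cf tree that follows is the same insertion idiom a third time, keyed on fit time cl_f instead of cl_e or cl_vt.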
328 cftree_insert(struct hfsc_class *cl) in cftree_insert() argument
330 struct rb_node **p = &cl->cl_parent->cf_tree.rb_node; in cftree_insert()
337 if (cl->cl_f >= cl1->cl_f) in cftree_insert()
342 rb_link_node(&cl->cf_node, parent, p); in cftree_insert()
343 rb_insert_color(&cl->cf_node, &cl->cl_parent->cf_tree); in cftree_insert()
347 cftree_remove(struct hfsc_class *cl) in cftree_remove() argument
349 rb_erase(&cl->cf_node, &cl->cl_parent->cf_tree); in cftree_remove()
353 cftree_update(struct hfsc_class *cl) in cftree_update() argument
355 cftree_remove(cl); in cftree_update()
356 cftree_insert(cl); in cftree_update()
618 init_ed(struct hfsc_class *cl, unsigned int next_len) in init_ed() argument
623 rtsc_min(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul); in init_ed()
630 cl->cl_eligible = cl->cl_deadline; in init_ed()
631 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) { in init_ed()
632 cl->cl_eligible.dx = 0; in init_ed()
633 cl->cl_eligible.dy = 0; in init_ed()
637 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul); in init_ed()
638 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len); in init_ed()
640 eltree_insert(cl); in init_ed()
644 update_ed(struct hfsc_class *cl, unsigned int next_len) in update_ed() argument
646 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul); in update_ed()
647 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len); in update_ed()
649 eltree_update(cl); in update_ed()
653 update_d(struct hfsc_class *cl, unsigned int next_len) in update_d() argument
655 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len); in update_d()
659 update_cfmin(struct hfsc_class *cl) in update_cfmin() argument
661 struct rb_node *n = rb_first(&cl->cf_tree); in update_cfmin()
665 cl->cl_cfmin = 0; in update_cfmin()
669 cl->cl_cfmin = p->cl_f; in update_cfmin()
673 init_vf(struct hfsc_class *cl, unsigned int len) in init_vf() argument
682 for (; cl->cl_parent != NULL; cl = cl->cl_parent) { in init_vf()
683 if (go_active && cl->cl_nactive++ == 0) in init_vf()
689 n = rb_last(&cl->cl_parent->vt_tree); in init_vf()
698 if (cl->cl_parent->cl_cvtmin != 0) in init_vf()
699 vt = (cl->cl_parent->cl_cvtmin + vt)/2; in init_vf()
701 if (cl->cl_parent->cl_vtperiod != in init_vf()
702 cl->cl_parentperiod || vt > cl->cl_vt) in init_vf()
703 cl->cl_vt = vt; in init_vf()
711 vt = cl->cl_parent->cl_cvtmax; in init_vf()
712 cl->cl_parent->cl_cvtoff += vt; in init_vf()
713 cl->cl_parent->cl_cvtmax = 0; in init_vf()
714 cl->cl_parent->cl_cvtmin = 0; in init_vf()
715 cl->cl_vt = 0; in init_vf()
718 cl->cl_vtoff = cl->cl_parent->cl_cvtoff - in init_vf()
719 cl->cl_pcvtoff; in init_vf()
722 vt = cl->cl_vt + cl->cl_vtoff; in init_vf()
723 rtsc_min(&cl->cl_virtual, &cl->cl_fsc, vt, in init_vf()
724 cl->cl_total); in init_vf()
725 if (cl->cl_virtual.x == vt) { in init_vf()
726 cl->cl_virtual.x -= cl->cl_vtoff; in init_vf()
727 cl->cl_vtoff = 0; in init_vf()
729 cl->cl_vtadj = 0; in init_vf()
731 cl->cl_vtperiod++; /* increment vt period */ in init_vf()
732 cl->cl_parentperiod = cl->cl_parent->cl_vtperiod; in init_vf()
733 if (cl->cl_parent->cl_nactive == 0) in init_vf()
734 cl->cl_parentperiod++; in init_vf()
735 cl->cl_f = 0; in init_vf()
737 vttree_insert(cl); in init_vf()
738 cftree_insert(cl); in init_vf()
740 if (cl->cl_flags & HFSC_USC) { in init_vf()
746 rtsc_min(&cl->cl_ulimit, &cl->cl_usc, cur_time, in init_vf()
747 cl->cl_total); in init_vf()
749 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit, in init_vf()
750 cl->cl_total); in init_vf()
751 cl->cl_myfadj = 0; in init_vf()
755 f = max(cl->cl_myf, cl->cl_cfmin); in init_vf()
756 if (f != cl->cl_f) { in init_vf()
757 cl->cl_f = f; in init_vf()
758 cftree_update(cl); in init_vf()
760 update_cfmin(cl->cl_parent); in init_vf()
765 update_vf(struct hfsc_class *cl, unsigned int len, u64 cur_time) in update_vf() argument
770 if (cl->qdisc->q.qlen == 0 && cl->cl_flags & HFSC_FSC) in update_vf()
773 for (; cl->cl_parent != NULL; cl = cl->cl_parent) { in update_vf()
774 cl->cl_total += len; in update_vf()
776 if (!(cl->cl_flags & HFSC_FSC) || cl->cl_nactive == 0) in update_vf()
779 if (go_passive && --cl->cl_nactive == 0) in update_vf()
788 if (cl->cl_vt > cl->cl_parent->cl_cvtmax) in update_vf()
789 cl->cl_parent->cl_cvtmax = cl->cl_vt; in update_vf()
792 vttree_remove(cl); in update_vf()
794 cftree_remove(cl); in update_vf()
795 update_cfmin(cl->cl_parent); in update_vf()
803 cl->cl_vt = rtsc_y2x(&cl->cl_virtual, cl->cl_total) in update_vf()
804 - cl->cl_vtoff + cl->cl_vtadj; in update_vf()
811 if (cl->cl_vt < cl->cl_parent->cl_cvtmin) { in update_vf()
812 cl->cl_vtadj += cl->cl_parent->cl_cvtmin - cl->cl_vt; in update_vf()
813 cl->cl_vt = cl->cl_parent->cl_cvtmin; in update_vf()
817 vttree_update(cl); in update_vf()
819 if (cl->cl_flags & HFSC_USC) { in update_vf()
820 cl->cl_myf = cl->cl_myfadj + rtsc_y2x(&cl->cl_ulimit, in update_vf()
821 cl->cl_total); in update_vf()
836 if (cl->cl_myf < myf_bound) { in update_vf()
837 delta = cur_time - cl->cl_myf; in update_vf()
838 cl->cl_myfadj += delta; in update_vf()
839 cl->cl_myf += delta; in update_vf()
844 f = max(cl->cl_myf, cl->cl_cfmin); in update_vf()
845 if (f != cl->cl_f) { in update_vf()
846 cl->cl_f = f; in update_vf()
847 cftree_update(cl); in update_vf()
848 update_cfmin(cl->cl_parent); in update_vf()
854 set_active(struct hfsc_class *cl, unsigned int len) in set_active() argument
856 if (cl->cl_flags & HFSC_RSC) in set_active()
857 init_ed(cl, len); in set_active()
858 if (cl->cl_flags & HFSC_FSC) in set_active()
859 init_vf(cl, len); in set_active()
861 list_add_tail(&cl->dlist, &cl->sched->droplist); in set_active()
865 set_passive(struct hfsc_class *cl) in set_passive() argument
867 if (cl->cl_flags & HFSC_RSC) in set_passive()
868 eltree_remove(cl); in set_passive()
870 list_del(&cl->dlist); in set_passive()
895 hfsc_purge_queue(struct Qdisc *sch, struct hfsc_class *cl) in hfsc_purge_queue() argument
897 unsigned int len = cl->qdisc->q.qlen; in hfsc_purge_queue()
899 qdisc_reset(cl->qdisc); in hfsc_purge_queue()
900 qdisc_tree_decrease_qlen(cl->qdisc, len); in hfsc_purge_queue()
904 hfsc_adjust_levels(struct hfsc_class *cl) in hfsc_adjust_levels() argument
911 list_for_each_entry(p, &cl->children, siblings) { in hfsc_adjust_levels()
915 cl->level = level; in hfsc_adjust_levels()
916 } while ((cl = cl->cl_parent) != NULL); in hfsc_adjust_levels()
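
hfsc_adjust_levels recomputes levels bottom-up after the hierarchy changes: a class's level is one more than its highest child's. Filling in the elided comparison (it references p, not cl, so the search does not match it):

static void
hfsc_adjust_levels(struct hfsc_class *cl)
{
	struct hfsc_class *p;
	unsigned int level;

	do {
		level = 0;
		list_for_each_entry(p, &cl->children, siblings) {
			if (p->level >= level)
				level = p->level + 1;
		}
		cl->level = level;
	} while ((cl = cl->cl_parent) != NULL);
}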
932 hfsc_change_rsc(struct hfsc_class *cl, struct tc_service_curve *rsc, in hfsc_change_rsc() argument
935 sc2isc(rsc, &cl->cl_rsc); in hfsc_change_rsc()
936 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul); in hfsc_change_rsc()
937 cl->cl_eligible = cl->cl_deadline; in hfsc_change_rsc()
938 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) { in hfsc_change_rsc()
939 cl->cl_eligible.dx = 0; in hfsc_change_rsc()
940 cl->cl_eligible.dy = 0; in hfsc_change_rsc()
942 cl->cl_flags |= HFSC_RSC; in hfsc_change_rsc()
946 hfsc_change_fsc(struct hfsc_class *cl, struct tc_service_curve *fsc) in hfsc_change_fsc() argument
948 sc2isc(fsc, &cl->cl_fsc); in hfsc_change_fsc()
949 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total); in hfsc_change_fsc()
950 cl->cl_flags |= HFSC_FSC; in hfsc_change_fsc()
954 hfsc_change_usc(struct hfsc_class *cl, struct tc_service_curve *usc, in hfsc_change_usc() argument
957 sc2isc(usc, &cl->cl_usc); in hfsc_change_usc()
958 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, cur_time, cl->cl_total); in hfsc_change_usc()
959 cl->cl_flags |= HFSC_USC; in hfsc_change_usc()
973 struct hfsc_class *cl = (struct hfsc_class *)*arg; in hfsc_change_class() local
1006 if (cl != NULL) { in hfsc_change_class()
1008 if (cl->cl_parent && in hfsc_change_class()
1009 cl->cl_parent->cl_common.classid != parentid) in hfsc_change_class()
1011 if (cl->cl_parent == NULL && parentid != TC_H_ROOT) in hfsc_change_class()
1019 err = gen_replace_estimator(&cl->bstats, NULL, in hfsc_change_class()
1020 &cl->rate_est, in hfsc_change_class()
1029 hfsc_change_rsc(cl, rsc, cur_time); in hfsc_change_class()
1031 hfsc_change_fsc(cl, fsc); in hfsc_change_class()
1033 hfsc_change_usc(cl, usc, cur_time); in hfsc_change_class()
1035 if (cl->qdisc->q.qlen != 0) { in hfsc_change_class()
1036 if (cl->cl_flags & HFSC_RSC) in hfsc_change_class()
1037 update_ed(cl, qdisc_peek_len(cl->qdisc)); in hfsc_change_class()
1038 if (cl->cl_flags & HFSC_FSC) in hfsc_change_class()
1039 update_vf(cl, 0, cur_time); in hfsc_change_class()
1064 cl = kzalloc(sizeof(struct hfsc_class), GFP_KERNEL); in hfsc_change_class()
1065 if (cl == NULL) in hfsc_change_class()
1069 err = gen_new_estimator(&cl->bstats, NULL, &cl->rate_est, in hfsc_change_class()
1073 kfree(cl); in hfsc_change_class()
1079 hfsc_change_rsc(cl, rsc, 0); in hfsc_change_class()
1081 hfsc_change_fsc(cl, fsc); in hfsc_change_class()
1083 hfsc_change_usc(cl, usc, 0); in hfsc_change_class()
1085 cl->cl_common.classid = classid; in hfsc_change_class()
1086 cl->refcnt = 1; in hfsc_change_class()
1087 cl->sched = q; in hfsc_change_class()
1088 cl->cl_parent = parent; in hfsc_change_class()
1089 cl->qdisc = qdisc_create_dflt(sch->dev_queue, in hfsc_change_class()
1091 if (cl->qdisc == NULL) in hfsc_change_class()
1092 cl->qdisc = &noop_qdisc; in hfsc_change_class()
1093 INIT_LIST_HEAD(&cl->children); in hfsc_change_class()
1094 cl->vt_tree = RB_ROOT; in hfsc_change_class()
1095 cl->cf_tree = RB_ROOT; in hfsc_change_class()
1098 qdisc_class_hash_insert(&q->clhash, &cl->cl_common); in hfsc_change_class()
1099 list_add_tail(&cl->siblings, &parent->children); in hfsc_change_class()
1103 cl->cl_pcvtoff = parent->cl_cvtoff; in hfsc_change_class()
1108 *arg = (unsigned long)cl; in hfsc_change_class()
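
On the create path, the new class is attached under the qdisc tree lock; if the parent was a leaf, its queue must be purged first, because an inner class no longer holds packets itself. A sketch of that attach sequence around the matched lines (the sch_tree_lock/sch_tree_unlock pairing is an assumption consistent with other classful qdiscs):

	sch_tree_lock(sch);
	qdisc_class_hash_insert(&q->clhash, &cl->cl_common);
	list_add_tail(&cl->siblings, &parent->children);
	if (parent->level == 0)
		hfsc_purge_queue(sch, parent);	/* parent just became inner */
	hfsc_adjust_levels(parent);
	cl->cl_pcvtoff = parent->cl_cvtoff;
	sch_tree_unlock(sch);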
1113 hfsc_destroy_class(struct Qdisc *sch, struct hfsc_class *cl) in hfsc_destroy_class() argument
1117 tcf_destroy_chain(&cl->filter_list); in hfsc_destroy_class()
1118 qdisc_destroy(cl->qdisc); in hfsc_destroy_class()
1119 gen_kill_estimator(&cl->bstats, &cl->rate_est); in hfsc_destroy_class()
1120 if (cl != &q->root) in hfsc_destroy_class()
1121 kfree(cl); in hfsc_destroy_class()
1128 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_delete_class() local
1130 if (cl->level > 0 || cl->filter_cnt > 0 || cl == &q->root) in hfsc_delete_class()
1135 list_del(&cl->siblings); in hfsc_delete_class()
1136 hfsc_adjust_levels(cl->cl_parent); in hfsc_delete_class()
1138 hfsc_purge_queue(sch, cl); in hfsc_delete_class()
1139 qdisc_class_hash_remove(&q->clhash, &cl->cl_common); in hfsc_delete_class()
1141 BUG_ON(--cl->refcnt == 0); in hfsc_delete_class()
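
The BUG_ON on the decremented refcount looks inverted at first glance, but it asserts that the count cannot reach zero here: the caller (tc_ctl_tclass) still holds the reference taken via cops->get(), and destruction happens later from cops->put(). In context, with an explanatory comment added:

	qdisc_class_hash_remove(&q->clhash, &cl->cl_common);

	/*
	 * The caller still holds one cops->get() reference, so the count
	 * must stay positive; hfsc_put_class() performs the actual
	 * hfsc_destroy_class() once the count drops to zero.
	 */
	BUG_ON(--cl->refcnt == 0);

	sch_tree_unlock(sch);
	return 0;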
1155 struct hfsc_class *head, *cl; in hfsc_classify() local
1161 (cl = hfsc_find_class(skb->priority, sch)) != NULL) in hfsc_classify()
1162 if (cl->level == 0) in hfsc_classify()
1163 return cl; in hfsc_classify()
1178 cl = (struct hfsc_class *)res.class; in hfsc_classify()
1179 if (!cl) { in hfsc_classify()
1180 cl = hfsc_find_class(res.classid, sch); in hfsc_classify()
1181 if (!cl) in hfsc_classify()
1183 if (cl->level >= head->level) in hfsc_classify()
1187 if (cl->level == 0) in hfsc_classify()
1188 return cl; /* hit leaf class */ in hfsc_classify()
1191 tcf = rcu_dereference_bh(cl->filter_list); in hfsc_classify()
1192 head = cl; in hfsc_classify()
1196 cl = hfsc_find_class(TC_H_MAKE(TC_H_MAJ(sch->handle), q->defcls), sch); in hfsc_classify()
1197 if (cl == NULL || cl->level > 0) in hfsc_classify()
1200 return cl; in hfsc_classify()
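
hfsc_classify chains filter lookups downward through inner classes until it hits a leaf. A sketch of the loop skeleton around the matched lines; the tc_classify call is shown in the four-argument form used by kernels of roughly this vintage (its signature varies across versions), and the TC_ACT_QUEUED/STOLEN/SHOT action handling is omitted:

	*qerr = NET_XMIT_SUCCESS | __NET_XMIT_BYPASS;
	head = &q->root;
	tcf = rcu_dereference_bh(q->root.filter_list);
	while (tcf && (result = tc_classify(skb, tcf, &res, false)) >= 0) {
		cl = (struct hfsc_class *)res.class;
		if (!cl) {
			cl = hfsc_find_class(res.classid, sch);
			if (!cl)
				break;	/* filter selected an invalid class */
			if (cl->level >= head->level)
				break;	/* filter may only point downwards */
		}

		if (cl->level == 0)
			return cl;	/* hit leaf class */

		/* apply the inner class's own filter chain */
		tcf = rcu_dereference_bh(cl->filter_list);
		head = cl;
	}

If no filter resolves to a leaf, the fallback at the end looks up the default class (q->defcls) under the qdisc's major handle and uses it only if it is a leaf.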
1207 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_graft_class() local
1209 if (cl->level > 0) in hfsc_graft_class()
1213 cl->cl_common.classid); in hfsc_graft_class()
1219 hfsc_purge_queue(sch, cl); in hfsc_graft_class()
1220 *old = cl->qdisc; in hfsc_graft_class()
1221 cl->qdisc = new; in hfsc_graft_class()
1229 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_class_leaf() local
1231 if (cl->level == 0) in hfsc_class_leaf()
1232 return cl->qdisc; in hfsc_class_leaf()
1240 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_qlen_notify() local
1242 if (cl->qdisc->q.qlen == 0) { in hfsc_qlen_notify()
1243 update_vf(cl, 0, 0); in hfsc_qlen_notify()
1244 set_passive(cl); in hfsc_qlen_notify()
1251 struct hfsc_class *cl = hfsc_find_class(classid, sch); in hfsc_get_class() local
1253 if (cl != NULL) in hfsc_get_class()
1254 cl->refcnt++; in hfsc_get_class()
1256 return (unsigned long)cl; in hfsc_get_class()
1262 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_put_class() local
1264 if (--cl->refcnt == 0) in hfsc_put_class()
1265 hfsc_destroy_class(sch, cl); in hfsc_put_class()
1272 struct hfsc_class *cl = hfsc_find_class(classid, sch); in hfsc_bind_tcf() local
1274 if (cl != NULL) { in hfsc_bind_tcf()
1275 if (p != NULL && p->level <= cl->level) in hfsc_bind_tcf()
1277 cl->filter_cnt++; in hfsc_bind_tcf()
1280 return (unsigned long)cl; in hfsc_bind_tcf()
1286 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_unbind_tcf() local
1288 cl->filter_cnt--; in hfsc_unbind_tcf()
1295 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_tcf_chain() local
1297 if (cl == NULL) in hfsc_tcf_chain()
1298 cl = &q->root; in hfsc_tcf_chain()
1300 return &cl->filter_list; in hfsc_tcf_chain()
1321 hfsc_dump_curves(struct sk_buff *skb, struct hfsc_class *cl) in hfsc_dump_curves() argument
1323 if ((cl->cl_flags & HFSC_RSC) && in hfsc_dump_curves()
1324 (hfsc_dump_sc(skb, TCA_HFSC_RSC, &cl->cl_rsc) < 0)) in hfsc_dump_curves()
1327 if ((cl->cl_flags & HFSC_FSC) && in hfsc_dump_curves()
1328 (hfsc_dump_sc(skb, TCA_HFSC_FSC, &cl->cl_fsc) < 0)) in hfsc_dump_curves()
1331 if ((cl->cl_flags & HFSC_USC) && in hfsc_dump_curves()
1332 (hfsc_dump_sc(skb, TCA_HFSC_USC, &cl->cl_usc) < 0)) in hfsc_dump_curves()
1345 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_dump_class() local
1348 tcm->tcm_parent = cl->cl_parent ? cl->cl_parent->cl_common.classid : in hfsc_dump_class()
1350 tcm->tcm_handle = cl->cl_common.classid; in hfsc_dump_class()
1351 if (cl->level == 0) in hfsc_dump_class()
1352 tcm->tcm_info = cl->qdisc->handle; in hfsc_dump_class()
1357 if (hfsc_dump_curves(skb, cl) < 0) in hfsc_dump_class()
1370 struct hfsc_class *cl = (struct hfsc_class *)arg; in hfsc_dump_class_stats() local
1373 cl->qstats.backlog = cl->qdisc->qstats.backlog; in hfsc_dump_class_stats()
1374 xstats.level = cl->level; in hfsc_dump_class_stats()
1375 xstats.period = cl->cl_vtperiod; in hfsc_dump_class_stats()
1376 xstats.work = cl->cl_total; in hfsc_dump_class_stats()
1377 xstats.rtwork = cl->cl_cumul; in hfsc_dump_class_stats()
1379 if (gnet_stats_copy_basic(d, NULL, &cl->bstats) < 0 || in hfsc_dump_class_stats()
1380 gnet_stats_copy_rate_est(d, &cl->bstats, &cl->rate_est) < 0 || in hfsc_dump_class_stats()
1381 gnet_stats_copy_queue(d, NULL, &cl->qstats, cl->qdisc->q.qlen) < 0) in hfsc_dump_class_stats()
1393 struct hfsc_class *cl; in hfsc_walk() local
1400 hlist_for_each_entry(cl, &q->clhash.hash[i], in hfsc_walk()
1406 if (arg->fn(sch, (unsigned long)cl, arg) < 0) { in hfsc_walk()
1419 struct hfsc_class *cl; in hfsc_schedule_watchdog() local
1422 cl = eltree_get_minel(q); in hfsc_schedule_watchdog()
1423 if (cl) in hfsc_schedule_watchdog()
1424 next_time = cl->cl_e; in hfsc_schedule_watchdog()
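
The watchdog arms a timer for the next moment the scheduler could have work: the earliest eligible time, or the root's earliest fit time if that comes sooner. A sketch completing the body (the WARN_ON and the qdisc_watchdog_schedule call are assumptions in line with the standard qdisc watchdog pattern):

static void
hfsc_schedule_watchdog(struct Qdisc *sch)
{
	struct hfsc_sched *q = qdisc_priv(sch);
	struct hfsc_class *cl;
	u64 next_time = 0;

	/* wake up at the earliest eligible time ... */
	cl = eltree_get_minel(q);
	if (cl)
		next_time = cl->cl_e;
	/* ... or at the root's earliest fit time, whichever comes first */
	if (q->root.cl_cfmin != 0) {
		if (next_time == 0 || next_time > q->root.cl_cfmin)
			next_time = q->root.cl_cfmin;
	}
	WARN_ON(next_time == 0);
	qdisc_watchdog_schedule(&q->watchdog, next_time);
}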
1488 hfsc_reset_class(struct hfsc_class *cl) in hfsc_reset_class() argument
1490 cl->cl_total = 0; in hfsc_reset_class()
1491 cl->cl_cumul = 0; in hfsc_reset_class()
1492 cl->cl_d = 0; in hfsc_reset_class()
1493 cl->cl_e = 0; in hfsc_reset_class()
1494 cl->cl_vt = 0; in hfsc_reset_class()
1495 cl->cl_vtadj = 0; in hfsc_reset_class()
1496 cl->cl_vtoff = 0; in hfsc_reset_class()
1497 cl->cl_cvtmin = 0; in hfsc_reset_class()
1498 cl->cl_cvtmax = 0; in hfsc_reset_class()
1499 cl->cl_cvtoff = 0; in hfsc_reset_class()
1500 cl->cl_pcvtoff = 0; in hfsc_reset_class()
1501 cl->cl_vtperiod = 0; in hfsc_reset_class()
1502 cl->cl_parentperiod = 0; in hfsc_reset_class()
1503 cl->cl_f = 0; in hfsc_reset_class()
1504 cl->cl_myf = 0; in hfsc_reset_class()
1505 cl->cl_myfadj = 0; in hfsc_reset_class()
1506 cl->cl_cfmin = 0; in hfsc_reset_class()
1507 cl->cl_nactive = 0; in hfsc_reset_class()
1509 cl->vt_tree = RB_ROOT; in hfsc_reset_class()
1510 cl->cf_tree = RB_ROOT; in hfsc_reset_class()
1511 qdisc_reset(cl->qdisc); in hfsc_reset_class()
1513 if (cl->cl_flags & HFSC_RSC) in hfsc_reset_class()
1514 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, 0, 0); in hfsc_reset_class()
1515 if (cl->cl_flags & HFSC_FSC) in hfsc_reset_class()
1516 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, 0, 0); in hfsc_reset_class()
1517 if (cl->cl_flags & HFSC_USC) in hfsc_reset_class()
1518 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, 0, 0); in hfsc_reset_class()
1525 struct hfsc_class *cl; in hfsc_reset_qdisc() local
1529 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) in hfsc_reset_qdisc()
1530 hfsc_reset_class(cl); in hfsc_reset_qdisc()
1543 struct hfsc_class *cl; in hfsc_destroy_qdisc() local
1547 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) in hfsc_destroy_qdisc()
1548 tcf_destroy_chain(&cl->filter_list); in hfsc_destroy_qdisc()
1551 hlist_for_each_entry_safe(cl, next, &q->clhash.hash[i], in hfsc_destroy_qdisc()
1553 hfsc_destroy_class(sch, cl); in hfsc_destroy_qdisc()
1565 struct hfsc_class *cl; in hfsc_dump_qdisc() local
1570 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) in hfsc_dump_qdisc()
1571 sch->qstats.backlog += cl->qdisc->qstats.backlog; in hfsc_dump_qdisc()
1587 struct hfsc_class *cl; in hfsc_enqueue() local
1590 cl = hfsc_classify(skb, sch, &err); in hfsc_enqueue()
1591 if (cl == NULL) { in hfsc_enqueue()
1598 err = qdisc_enqueue(skb, cl->qdisc); in hfsc_enqueue()
1601 cl->qstats.drops++; in hfsc_enqueue()
1607 if (cl->qdisc->q.qlen == 1) in hfsc_enqueue()
1608 set_active(cl, qdisc_pkt_len(skb)); in hfsc_enqueue()
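
Enqueue itself is thin: classify, hand the packet to the leaf's child qdisc, and activate the class when its queue goes from empty to one packet. A sketch assembled around the matched lines; the statistics calls (qdisc_qstats_drop, net_xmit_drop_count) are assumptions in line with qdiscs of this era:

static int
hfsc_enqueue(struct sk_buff *skb, struct Qdisc *sch)
{
	struct hfsc_class *cl;
	int err;

	cl = hfsc_classify(skb, sch, &err);
	if (cl == NULL) {
		if (err & __NET_XMIT_BYPASS)
			qdisc_qstats_drop(sch);
		kfree_skb(skb);
		return err;
	}

	err = qdisc_enqueue(skb, cl->qdisc);
	if (unlikely(err != NET_XMIT_SUCCESS)) {
		if (net_xmit_drop_count(err)) {
			cl->qstats.drops++;
			qdisc_qstats_drop(sch);
		}
		return err;
	}

	/* first packet in the leaf queue: the class becomes active */
	if (cl->qdisc->q.qlen == 1)
		set_active(cl, qdisc_pkt_len(skb));

	sch->q.qlen++;
	return NET_XMIT_SUCCESS;
}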
1619 struct hfsc_class *cl; in hfsc_dequeue() local
1635 cl = eltree_get_mindl(q, cur_time); in hfsc_dequeue()
1636 if (cl) { in hfsc_dequeue()
1643 cl = vttree_get_minvt(&q->root, cur_time); in hfsc_dequeue()
1644 if (cl == NULL) { in hfsc_dequeue()
1651 skb = qdisc_dequeue_peeked(cl->qdisc); in hfsc_dequeue()
1653 qdisc_warn_nonwc("HFSC", cl->qdisc); in hfsc_dequeue()
1657 bstats_update(&cl->bstats, skb); in hfsc_dequeue()
1658 update_vf(cl, qdisc_pkt_len(skb), cur_time); in hfsc_dequeue()
1660 cl->cl_cumul += qdisc_pkt_len(skb); in hfsc_dequeue()
1662 if (cl->qdisc->q.qlen != 0) { in hfsc_dequeue()
1663 if (cl->cl_flags & HFSC_RSC) { in hfsc_dequeue()
1665 next_len = qdisc_peek_len(cl->qdisc); in hfsc_dequeue()
1667 update_ed(cl, next_len); in hfsc_dequeue()
1669 update_d(cl, next_len); in hfsc_dequeue()
1673 set_passive(cl); in hfsc_dequeue()
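
Dequeue implements HFSC's two selection criteria in order. A sketch of that decision, reconstructed around the matched lines (the realtime flag and the overlimit accounting are assumptions consistent with the update path shown above):

	cur_time = psched_get_time();

	/*
	 * Real-time criterion first: among classes whose eligible time
	 * has arrived, pick the one with the earliest deadline.
	 */
	cl = eltree_get_mindl(q, cur_time);
	if (cl) {
		realtime = 1;
	} else {
		/*
		 * Otherwise use link-sharing: descend the vt trees from
		 * the root to the leaf with the minimum virtual time.
		 */
		cl = vttree_get_minvt(&q->root, cur_time);
		if (cl == NULL) {
			qdisc_qstats_overlimit(sch);
			hfsc_schedule_watchdog(sch);
			return NULL;
		}
	}

cl_cumul is charged only when the packet was sent under the real-time criterion, which keeps the real-time guarantee's accounting independent of whatever extra service a class receives through link-sharing.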
1687 struct hfsc_class *cl; in hfsc_drop() local
1690 list_for_each_entry(cl, &q->droplist, dlist) { in hfsc_drop()
1691 if (cl->qdisc->ops->drop != NULL && in hfsc_drop()
1692 (len = cl->qdisc->ops->drop(cl->qdisc)) > 0) { in hfsc_drop()
1693 if (cl->qdisc->q.qlen == 0) { in hfsc_drop()
1694 update_vf(cl, 0, 0); in hfsc_drop()
1695 set_passive(cl); in hfsc_drop()
1697 list_move_tail(&cl->dlist, &q->droplist); in hfsc_drop()
1699 cl->qstats.drops++; in hfsc_drop()