Lines matching refs:cl (every reference to the HFSC class pointer cl, each prefixed with its line number in the source file)

168 #define	is_a_parent_class(cl)	((cl)->cl_children != NULL)
245 struct hfsc_class *cl, *parent;
276 cl = hfsc_class_create(hif, &rtsc, &lssc, &ulsc,
278 if (cl == NULL)
288 struct hfsc_class *cl;
293 if ((cl = clh_to_clp(hif, a->qid)) == NULL)
296 return (hfsc_class_destroy(cl));
303 struct hfsc_class *cl;
310 if ((cl = clh_to_clp(hif, a->qid)) == NULL)
316 get_class_stats(&stats, cl);
332 struct hfsc_class *cl;
341 (cl = hif->hif_rootclass->cl_children) != NULL) {
346 for (; cl != NULL; cl = hfsc_nextclass(cl)) {
347 if (!is_a_parent_class(cl)) {
348 (void)hfsc_class_destroy(cl);
374 struct hfsc_class *cl;
376 for (cl = hif->hif_rootclass; cl != NULL; cl = hfsc_nextclass(cl))
377 if (!qempty(cl->cl_q))
378 hfsc_purgeq(cl);
388 struct hfsc_class *cl, *p;
403 cl = malloc(sizeof(struct hfsc_class), M_DEVBUF, M_WAITOK|M_ZERO);
404 if (cl == NULL)
407 cl->cl_q = malloc(sizeof(class_queue_t), M_DEVBUF, M_WAITOK|M_ZERO);
408 if (cl->cl_q == NULL)
411 cl->cl_actc = actlist_alloc();
412 if (cl->cl_actc == NULL)
417 qlimit(cl->cl_q) = qlimit;
418 qtype(cl->cl_q) = Q_DROPTAIL;
419 qlen(cl->cl_q) = 0;
420 cl->cl_flags = flags;
447 cl->cl_red = red_alloc(0, 0,
448 qlimit(cl->cl_q) * 10/100,
449 qlimit(cl->cl_q) * 30/100,
451 if (cl->cl_red != NULL)
452 qtype(cl->cl_q) = Q_RED;
456 cl->cl_red = (red_t *)rio_alloc(0, NULL,
458 if (cl->cl_red != NULL)
459 qtype(cl->cl_q) = Q_RIO;
466 cl->cl_rsc = malloc(sizeof(struct internal_sc), M_DEVBUF,
468 if (cl->cl_rsc == NULL)
470 sc2isc(rsc, cl->cl_rsc);
471 rtsc_init(&cl->cl_deadline, cl->cl_rsc, 0, 0);
472 rtsc_init(&cl->cl_eligible, cl->cl_rsc, 0, 0);
475 cl->cl_fsc = malloc(sizeof(struct internal_sc), M_DEVBUF,
477 if (cl->cl_fsc == NULL)
479 sc2isc(fsc, cl->cl_fsc);
480 rtsc_init(&cl->cl_virtual, cl->cl_fsc, 0, 0);
483 cl->cl_usc = malloc(sizeof(struct internal_sc), M_DEVBUF,
485 if (cl->cl_usc == NULL)
487 sc2isc(usc, cl->cl_usc);
488 rtsc_init(&cl->cl_ulimit, cl->cl_usc, 0, 0);
491 cl->cl_id = hif->hif_classid++;
492 cl->cl_handle = qid;
493 cl->cl_hif = hif;
494 cl->cl_parent = parent;
506 hif->hif_class_tbl[i] = cl;
510 hif->hif_class_tbl[i] = cl;
520 hif->hif_defaultclass = cl;
524 hif->hif_rootclass = cl;
528 parent->cl_children = cl;
532 p->cl_siblings = cl;
537 return (cl);
540 if (cl->cl_actc != NULL)
541 actlist_destroy(cl->cl_actc);
542 if (cl->cl_red != NULL) {
544 if (q_is_rio(cl->cl_q))
545 rio_destroy((rio_t *)cl->cl_red);
548 if (q_is_red(cl->cl_q))
549 red_destroy(cl->cl_red);
552 if (cl->cl_fsc != NULL)
553 free(cl->cl_fsc, M_DEVBUF);
554 if (cl->cl_rsc != NULL)
555 free(cl->cl_rsc, M_DEVBUF);
556 if (cl->cl_usc != NULL)
557 free(cl->cl_usc, M_DEVBUF);
558 if (cl->cl_q != NULL)
559 free(cl->cl_q, M_DEVBUF);
560 free(cl, M_DEVBUF);
565 hfsc_class_destroy(struct hfsc_class *cl)
569 if (cl == NULL)
572 if (is_a_parent_class(cl))
579 acc_discard_filters(&cl->cl_hif->hif_classifier, cl, 0);
582 if (!qempty(cl->cl_q))
583 hfsc_purgeq(cl);
585 if (cl->cl_parent == NULL) {
588 struct hfsc_class *p = cl->cl_parent->cl_children;
590 if (p == cl)
591 cl->cl_parent->cl_children = cl->cl_siblings;
593 if (p->cl_siblings == cl) {
594 p->cl_siblings = cl->cl_siblings;
602 if (cl->cl_hif->hif_class_tbl[i] == cl) {
603 cl->cl_hif->hif_class_tbl[i] = NULL;
607 cl->cl_hif->hif_classes--;
610 actlist_destroy(cl->cl_actc);
612 if (cl->cl_red != NULL) {
614 if (q_is_rio(cl->cl_q))
615 rio_destroy((rio_t *)cl->cl_red);
618 if (q_is_red(cl->cl_q))
619 red_destroy(cl->cl_red);
623 if (cl == cl->cl_hif->hif_rootclass)
624 cl->cl_hif->hif_rootclass = NULL;
625 if (cl == cl->cl_hif->hif_defaultclass)
626 cl->cl_hif->hif_defaultclass = NULL;
628 if (cl->cl_usc != NULL)
629 free(cl->cl_usc, M_DEVBUF);
630 if (cl->cl_fsc != NULL)
631 free(cl->cl_fsc, M_DEVBUF);
632 if (cl->cl_rsc != NULL)
633 free(cl->cl_rsc, M_DEVBUF);
634 free(cl->cl_q, M_DEVBUF);
635 free(cl, M_DEVBUF);
643 * for (cl = hif->hif_rootclass; cl != NULL; cl = hfsc_nextclass(cl))
647 hfsc_nextclass(struct hfsc_class *cl)
649 if (cl->cl_children != NULL)
650 cl = cl->cl_children;
651 else if (cl->cl_siblings != NULL)
652 cl = cl->cl_siblings;
654 while ((cl = cl->cl_parent) != NULL)
655 if (cl->cl_siblings) {
656 cl = cl->cl_siblings;
661 return (cl);
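
The comment at source line 643 shows how hfsc_nextclass() is meant to be used: a pre-order walk over the class tree built from the cl_parent, cl_children and cl_siblings links that is_a_parent_class() (line 168) also tests. A minimal stand-alone sketch of that walk, keeping only the linkage logic visible in the fragments above and inventing the struct and names purely for illustration:

    #include <stdio.h>

    /* Hypothetical stand-in for struct hfsc_class: linkage fields only. */
    struct node {
        const char  *name;
        struct node *parent;
        struct node *children;   /* leftmost child */
        struct node *siblings;   /* next sibling to the right */
    };

    #define is_a_parent(n)  ((n)->children != NULL)

    /*
     * Pre-order successor, mirroring the logic shown for hfsc_nextclass():
     * descend to the first child if there is one, otherwise step to the
     * next sibling, otherwise climb until some ancestor has a sibling.
     */
    static struct node *
    nextnode(struct node *n)
    {
        if (n->children != NULL)
            n = n->children;
        else if (n->siblings != NULL)
            n = n->siblings;
        else {
            while ((n = n->parent) != NULL)
                if (n->siblings != NULL) {
                    n = n->siblings;
                    break;
                }
        }
        return (n);
    }

    int
    main(void)
    {
        /* root -> { a -> { a1 }, b } */
        struct node a1   = { "a1",   NULL, NULL, NULL };
        struct node b    = { "b",    NULL, NULL, NULL };
        struct node a    = { "a",    NULL, &a1,  &b   };
        struct node root = { "root", NULL, &a,   NULL };
        struct node *n;

        a.parent = b.parent = &root;
        a1.parent = &a;

        for (n = &root; n != NULL; n = nextnode(n))
            printf("%s%s\n", n->name, is_a_parent(n) ? " (parent)" : "");
        return (0);
    }

The same traversal idiom appears again at source lines 376, 2167 and 2176, where every class on the interface is visited without recursion or extra bookkeeping.
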
672 struct hfsc_class *cl;
684 cl = NULL;
686 cl = clh_to_clp(hif, ((struct altq_tag *)(t+1))->qid);
689 cl = pktattr->pattr_class;
691 if (cl == NULL || is_a_parent_class(cl)) {
692 cl = hif->hif_defaultclass;
693 if (cl == NULL) {
700 cl->cl_pktattr = pktattr; /* save proto hdr used by ECN */
703 cl->cl_pktattr = NULL;
705 if (hfsc_addq(cl, m) != 0) {
707 PKTCNTR_ADD(&cl->cl_stats.drop_cnt, len);
711 cl->cl_hif->hif_packets++;
714 if (qlen(cl->cl_q) == 1)
715 set_active(cl, m_pktlen(m));
733 struct hfsc_class *cl;
747 cl = hif->hif_pollcache;
750 if (cl->cl_rsc != NULL)
751 realtime = (cl->cl_e <= cur_time);
758 if ((cl = ellist_get_mindl(hif->hif_eligible, cur_time))
769 cl = hif->hif_rootclass;
770 while (is_a_parent_class(cl)) {
772 cl = actlist_firstfit(cl, cur_time);
773 if (cl == NULL) {
784 if (cl->cl_parent->cl_cvtmin < cl->cl_vt)
785 cl->cl_parent->cl_cvtmin = cl->cl_vt;
793 hif->hif_pollcache = cl;
794 m = hfsc_pollq(cl);
799 m = hfsc_getq(cl);
803 cl->cl_hif->hif_packets--;
805 PKTCNTR_ADD(&cl->cl_stats.xmit_cnt, len);
807 update_vf(cl, len, cur_time);
809 cl->cl_cumul += len;
811 if (!qempty(cl->cl_q)) {
812 if (cl->cl_rsc != NULL) {
814 next_len = m_pktlen(qhead(cl->cl_q));
817 update_ed(cl, next_len);
819 update_d(cl, next_len);
823 set_passive(cl);
830 hfsc_addq(struct hfsc_class *cl, struct mbuf *m)
834 if (q_is_rio(cl->cl_q))
835 return rio_addq((rio_t *)cl->cl_red, cl->cl_q,
836 m, cl->cl_pktattr);
839 if (q_is_red(cl->cl_q))
840 return red_addq(cl->cl_red, cl->cl_q, m, cl->cl_pktattr);
842 if (qlen(cl->cl_q) >= qlimit(cl->cl_q)) {
847 if (cl->cl_flags & HFCF_CLEARDSCP)
848 write_dsfield(m, cl->cl_pktattr, 0);
850 _addq(cl->cl_q, m);
856 hfsc_getq(struct hfsc_class *cl)
859 if (q_is_rio(cl->cl_q))
860 return rio_getq((rio_t *)cl->cl_red, cl->cl_q);
863 if (q_is_red(cl->cl_q))
864 return red_getq(cl->cl_red, cl->cl_q);
866 return _getq(cl->cl_q);
870 hfsc_pollq(struct hfsc_class *cl)
872 return qhead(cl->cl_q);
876 hfsc_purgeq(struct hfsc_class *cl)
880 if (qempty(cl->cl_q))
883 while ((m = _getq(cl->cl_q)) != NULL) {
884 PKTCNTR_ADD(&cl->cl_stats.drop_cnt, m_pktlen(m));
886 cl->cl_hif->hif_packets--;
887 IFQ_DEC_LEN(cl->cl_hif->hif_ifq);
889 ASSERT(qlen(cl->cl_q) == 0);
891 update_vf(cl, 0, 0); /* remove cl from the actlist */
892 set_passive(cl);
896 set_active(struct hfsc_class *cl, int len)
898 if (cl->cl_rsc != NULL)
899 init_ed(cl, len);
900 if (cl->cl_fsc != NULL)
901 init_vf(cl, len);
903 cl->cl_stats.period++;
907 set_passive(struct hfsc_class *cl)
909 if (cl->cl_rsc != NULL)
910 ellist_remove(cl);
913 * actlist is now handled in update_vf() so that update_vf(cl, 0, 0)
919 init_ed(struct hfsc_class *cl, int next_len)
926 rtsc_min(&cl->cl_deadline, cl->cl_rsc, cur_time, cl->cl_cumul);
933 cl->cl_eligible = cl->cl_deadline;
934 if (cl->cl_rsc->sm1 <= cl->cl_rsc->sm2) {
935 cl->cl_eligible.dx = 0;
936 cl->cl_eligible.dy = 0;
940 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul);
941 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
943 ellist_insert(cl);
947 update_ed(struct hfsc_class *cl, int next_len)
949 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul);
950 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
952 ellist_update(cl);
956 update_d(struct hfsc_class *cl, int next_len)
958 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
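
init_ed(), update_ed() and update_d() above all reduce to the same operation: invert a two-piece linear service curve with rtsc_y2x() to find the time (eligible time cl_e, deadline cl_d) at which a given amount of cumulative service is reached, with the dx/dy fields describing the first segment. A much simplified floating-point sketch of such a curve and its inverse follows; the real code works in scaled fixed-point slopes (sm1/sm2), so everything below is illustrative only:

    #include <stdio.h>

    /*
     * Simplified two-piece linear service curve anchored at (x, y):
     * slope m1 for d time units, then slope m2 afterwards.
     * Loosely modelled on the rtsc/internal_sc fields seen above.
     * Assumes non-zero slopes to keep the inverse well defined.
     */
    struct curve {
        double x, y;   /* anchor point */
        double m1;     /* first slope (bytes per time unit) */
        double d;      /* length of the first segment */
        double m2;     /* second slope */
    };

    /* y = f(t): service guaranteed by time t. */
    static double
    curve_x2y(const struct curve *c, double t)
    {
        if (t <= c->x)
            return (c->y);
        if (t <= c->x + c->d)
            return (c->y + c->m1 * (t - c->x));
        return (c->y + c->m1 * c->d + c->m2 * (t - c->x - c->d));
    }

    /* t = f^-1(y): earliest time at which y units of service are reached. */
    static double
    curve_y2x(const struct curve *c, double y)
    {
        double y_knee = c->y + c->m1 * c->d;

        if (y <= c->y)
            return (c->x);
        if (y <= y_knee)
            return (c->x + (y - c->y) / c->m1);
        return (c->x + c->d + (y - y_knee) / c->m2);
    }

    int
    main(void)
    {
        /* 100 bytes/unit for 10 units, then 50 bytes/unit. */
        struct curve c = { 0, 0, 100, 10, 50 };

        printf("y2x(500)  = %g\n", curve_y2x(&c, 500));   /* 5  */
        printf("y2x(1500) = %g\n", curve_y2x(&c, 1500));  /* 20 */
        printf("x2y(20)   = %g\n", curve_x2y(&c, 20));    /* 1500 */
        return (0);
    }

In this analogy, cl_cumul plays the role of the y value fed to the eligible curve, and cl_cumul plus the length of the next packet is fed to the deadline curve, exactly as update_ed() and update_d() do above.
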
962 init_vf(struct hfsc_class *cl, int len)
970 for ( ; cl->cl_parent != NULL; cl = cl->cl_parent) {
972 if (go_active && cl->cl_nactive++ == 0)
978 max_cl = actlist_last(cl->cl_parent->cl_actc);
986 if (cl->cl_parent->cl_cvtmin != 0)
987 vt = (cl->cl_parent->cl_cvtmin + vt)/2;
989 if (cl->cl_parent->cl_vtperiod !=
990 cl->cl_parentperiod || vt > cl->cl_vt)
991 cl->cl_vt = vt;
999 vt = cl->cl_parent->cl_cvtmax;
1000 for (p = cl->cl_parent->cl_children; p != NULL;
1003 cl->cl_vt = 0;
1004 cl->cl_parent->cl_cvtmax = 0;
1005 cl->cl_parent->cl_cvtmin = 0;
1007 cl->cl_initvt = cl->cl_vt;
1010 vt = cl->cl_vt + cl->cl_vtoff;
1011 rtsc_min(&cl->cl_virtual, cl->cl_fsc, vt, cl->cl_total);
1012 if (cl->cl_virtual.x == vt) {
1013 cl->cl_virtual.x -= cl->cl_vtoff;
1014 cl->cl_vtoff = 0;
1016 cl->cl_vtadj = 0;
1018 cl->cl_vtperiod++; /* increment vt period */
1019 cl->cl_parentperiod = cl->cl_parent->cl_vtperiod;
1020 if (cl->cl_parent->cl_nactive == 0)
1021 cl->cl_parentperiod++;
1022 cl->cl_f = 0;
1024 actlist_insert(cl);
1026 if (cl->cl_usc != NULL) {
1032 rtsc_min(&cl->cl_ulimit, cl->cl_usc, cur_time,
1033 cl->cl_total);
1035 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit,
1036 cl->cl_total);
1037 cl->cl_myfadj = 0;
1041 if (cl->cl_myf > cl->cl_cfmin)
1042 f = cl->cl_myf;
1044 f = cl->cl_cfmin;
1045 if (f != cl->cl_f) {
1046 cl->cl_f = f;
1047 update_cfmin(cl->cl_parent);
1053 update_vf(struct hfsc_class *cl, int len, u_int64_t cur_time)
1058 go_passive = qempty(cl->cl_q);
1060 for (; cl->cl_parent != NULL; cl = cl->cl_parent) {
1062 cl->cl_total += len;
1064 if (cl->cl_fsc == NULL || cl->cl_nactive == 0)
1067 if (go_passive && --cl->cl_nactive == 0)
1076 if (cl->cl_vt > cl->cl_parent->cl_cvtmax)
1077 cl->cl_parent->cl_cvtmax = cl->cl_vt;
1080 actlist_remove(cl);
1082 update_cfmin(cl->cl_parent);
1090 cl->cl_vt = rtsc_y2x(&cl->cl_virtual, cl->cl_total)
1091 - cl->cl_vtoff + cl->cl_vtadj;
1098 if (cl->cl_vt < cl->cl_parent->cl_cvtmin) {
1099 cl->cl_vtadj += cl->cl_parent->cl_cvtmin - cl->cl_vt;
1100 cl->cl_vt = cl->cl_parent->cl_cvtmin;
1104 actlist_update(cl);
1106 if (cl->cl_usc != NULL) {
1107 cl->cl_myf = cl->cl_myfadj
1108 + rtsc_y2x(&cl->cl_ulimit, cl->cl_total);
1118 if (cl->cl_myf < myf_bound) {
1119 delta = cur_time - cl->cl_myf;
1120 cl->cl_myfadj += delta;
1121 cl->cl_myf += delta;
1126 if (cl->cl_myf > cl->cl_cfmin)
1127 f = cl->cl_myf;
1129 f = cl->cl_cfmin;
1130 if (f != cl->cl_f) {
1131 cl->cl_f = f;
1132 update_cfmin(cl->cl_parent);
1138 update_cfmin(struct hfsc_class *cl)
1143 if (TAILQ_EMPTY(cl->cl_actc)) {
1144 cl->cl_cfmin = 0;
1148 TAILQ_FOREACH(p, cl->cl_actc, cl_actlist) {
1150 cl->cl_cfmin = 0;
1156 cl->cl_cfmin = cfmin;
1185 ellist_insert(struct hfsc_class *cl)
1187 struct hfsc_if *hif = cl->cl_hif;
1192 p->cl_e <= cl->cl_e) {
1193 TAILQ_INSERT_TAIL(hif->hif_eligible, cl, cl_ellist);
1198 if (cl->cl_e < p->cl_e) {
1199 TAILQ_INSERT_BEFORE(p, cl, cl_ellist);
1207 ellist_remove(struct hfsc_class *cl)
1209 struct hfsc_if *hif = cl->cl_hif;
1211 TAILQ_REMOVE(hif->hif_eligible, cl, cl_ellist);
1215 ellist_update(struct hfsc_class *cl)
1217 struct hfsc_if *hif = cl->cl_hif;
1224 p = TAILQ_NEXT(cl, cl_ellist);
1225 if (p == NULL || cl->cl_e <= p->cl_e)
1231 if (last->cl_e <= cl->cl_e) {
1232 TAILQ_REMOVE(hif->hif_eligible, cl, cl_ellist);
1233 TAILQ_INSERT_TAIL(hif->hif_eligible, cl, cl_ellist);
1242 if (cl->cl_e < p->cl_e) {
1243 TAILQ_REMOVE(hif->hif_eligible, cl, cl_ellist);
1244 TAILQ_INSERT_BEFORE(p, cl, cl_ellist);
1255 struct hfsc_class *p, *cl = NULL;
1260 if (cl == NULL || p->cl_d < cl->cl_d)
1261 cl = p;
1263 return (cl);
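
The ellist_*() fragments keep the eligible list ordered by cl_e with the <sys/queue.h> TAILQ macros, so ellist_get_mindl() can stop scanning as soon as it reaches a class whose eligible time lies in the future and return the eligible class with the smallest deadline. A self-contained sketch of that list discipline, with the structures and names invented and only the insert/scan pattern taken from the fragments above:

    #include <stdio.h>
    #include <sys/queue.h>

    struct item {
        unsigned long long e;    /* eligible time, list sort key */
        unsigned long long d;    /* deadline */
        TAILQ_ENTRY(item) link;
    };

    TAILQ_HEAD(elist, item);

    /* Insert sorted by ascending e, as ellist_insert() does for cl_e. */
    static void
    elist_insert(struct elist *head, struct item *it)
    {
        struct item *p;

        TAILQ_FOREACH(p, head, link)
            if (it->e < p->e) {
                TAILQ_INSERT_BEFORE(p, it, link);
                return;
            }
        TAILQ_INSERT_TAIL(head, it, link);
    }

    /* Among items with e <= now, return the one with the smallest d. */
    static struct item *
    elist_get_mindl(struct elist *head, unsigned long long now)
    {
        struct item *p, *best = NULL;

        TAILQ_FOREACH(p, head, link) {
            if (p->e > now)
                break;    /* sorted by e; the rest are not yet eligible */
            if (best == NULL || p->d < best->d)
                best = p;
        }
        return (best);
    }

    int
    main(void)
    {
        struct elist head = TAILQ_HEAD_INITIALIZER(head);
        struct item a = { 10, 40 }, b = { 5, 60 }, c = { 7, 30 }, *m;

        elist_insert(&head, &a);
        elist_insert(&head, &b);
        elist_insert(&head, &c);

        m = elist_get_mindl(&head, 8);    /* b and c eligible; c has min d */
        printf("min deadline = %llu\n", m ? m->d : 0);
        return (0);
    }

actlist_insert() and actlist_update() below maintain each parent's active-children list in the same way, sorted by cl_vt instead of cl_e.
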
1287 actlist_insert(struct hfsc_class *cl)
1292 if ((p = TAILQ_LAST(cl->cl_parent->cl_actc, _active)) == NULL
1293 || p->cl_vt <= cl->cl_vt) {
1294 TAILQ_INSERT_TAIL(cl->cl_parent->cl_actc, cl, cl_actlist);
1298 TAILQ_FOREACH(p, cl->cl_parent->cl_actc, cl_actlist) {
1299 if (cl->cl_vt < p->cl_vt) {
1300 TAILQ_INSERT_BEFORE(p, cl, cl_actlist);
1308 actlist_remove(struct hfsc_class *cl)
1310 TAILQ_REMOVE(cl->cl_parent->cl_actc, cl, cl_actlist);
1314 actlist_update(struct hfsc_class *cl)
1323 p = TAILQ_NEXT(cl, cl_actlist);
1324 if (p == NULL || cl->cl_vt < p->cl_vt)
1328 last = TAILQ_LAST(cl->cl_parent->cl_actc, _active);
1330 if (last->cl_vt <= cl->cl_vt) {
1331 TAILQ_REMOVE(cl->cl_parent->cl_actc, cl, cl_actlist);
1332 TAILQ_INSERT_TAIL(cl->cl_parent->cl_actc, cl, cl_actlist);
1341 if (cl->cl_vt < p->cl_vt) {
1342 TAILQ_REMOVE(cl->cl_parent->cl_actc, cl, cl_actlist);
1343 TAILQ_INSERT_BEFORE(p, cl, cl_actlist);
1351 actlist_firstfit(struct hfsc_class *cl, u_int64_t cur_time)
1355 TAILQ_FOREACH(p, cl->cl_actc, cl_actlist) {
1609 get_class_stats(struct hfsc_classstats *sp, struct hfsc_class *cl)
1611 sp->class_id = cl->cl_id;
1612 sp->class_handle = cl->cl_handle;
1614 if (cl->cl_rsc != NULL) {
1615 sp->rsc.m1 = sm2m(cl->cl_rsc->sm1);
1616 sp->rsc.d = dx2d(cl->cl_rsc->dx);
1617 sp->rsc.m2 = sm2m(cl->cl_rsc->sm2);
1623 if (cl->cl_fsc != NULL) {
1624 sp->fsc.m1 = sm2m(cl->cl_fsc->sm1);
1625 sp->fsc.d = dx2d(cl->cl_fsc->dx);
1626 sp->fsc.m2 = sm2m(cl->cl_fsc->sm2);
1632 if (cl->cl_usc != NULL) {
1633 sp->usc.m1 = sm2m(cl->cl_usc->sm1);
1634 sp->usc.d = dx2d(cl->cl_usc->dx);
1635 sp->usc.m2 = sm2m(cl->cl_usc->sm2);
1642 sp->total = cl->cl_total;
1643 sp->cumul = cl->cl_cumul;
1645 sp->d = cl->cl_d;
1646 sp->e = cl->cl_e;
1647 sp->vt = cl->cl_vt;
1648 sp->f = cl->cl_f;
1650 sp->initvt = cl->cl_initvt;
1651 sp->vtperiod = cl->cl_vtperiod;
1652 sp->parentperiod = cl->cl_parentperiod;
1653 sp->nactive = cl->cl_nactive;
1654 sp->vtoff = cl->cl_vtoff;
1655 sp->cvtmax = cl->cl_cvtmax;
1656 sp->myf = cl->cl_myf;
1657 sp->cfmin = cl->cl_cfmin;
1658 sp->cvtmin = cl->cl_cvtmin;
1659 sp->myfadj = cl->cl_myfadj;
1660 sp->vtadj = cl->cl_vtadj;
1665 sp->qlength = qlen(cl->cl_q);
1666 sp->qlimit = qlimit(cl->cl_q);
1667 sp->xmit_cnt = cl->cl_stats.xmit_cnt;
1668 sp->drop_cnt = cl->cl_stats.drop_cnt;
1669 sp->period = cl->cl_stats.period;
1671 sp->qtype = qtype(cl->cl_q);
1673 if (q_is_red(cl->cl_q))
1674 red_getstats(cl->cl_red, &sp->red[0]);
1677 if (q_is_rio(cl->cl_q))
1678 rio_getstats((rio_t *)cl->cl_red, &sp->red[0]);
1687 struct hfsc_class *cl;
1696 if ((cl = hif->hif_class_tbl[i]) != NULL && cl->cl_handle == chandle)
1697 return (cl);
1699 if ((cl = hif->hif_class_tbl[i]) != NULL &&
1700 cl->cl_handle == chandle)
1701 return (cl);
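
clh_to_clp() maps a class handle back to a class pointer: probe the hashed slot of hif_class_tbl first, then fall back to a full scan of the table when that slot does not hold the handle. A small sketch of the same probe-then-scan shape; the table size, the modulo hash and all names here are assumptions, only the lookup pattern comes from the fragments above:

    #include <stdint.h>
    #include <stdio.h>

    #define TBL_SIZE    64    /* stand-in for the class table size */

    struct klass {
        uint32_t handle;
    };

    static struct klass *class_tbl[TBL_SIZE];

    /* Probe the hashed slot first, then fall back to a full scan. */
    static struct klass *
    handle_to_class(uint32_t handle)
    {
        unsigned int i = handle % TBL_SIZE;
        struct klass *k;

        if ((k = class_tbl[i]) != NULL && k->handle == handle)
            return (k);
        for (i = 0; i < TBL_SIZE; i++)
            if ((k = class_tbl[i]) != NULL && k->handle == handle)
                return (k);
        return (NULL);
    }

    int
    main(void)
    {
        static struct klass a = { 0x101 };

        class_tbl[7] = &a;    /* stored in a slot its hash would not pick */
        printf("found: %s\n", handle_to_class(0x101) ? "yes" : "no");
        return (0);
    }
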
1758 hfsc_class_modify(struct hfsc_class *cl, struct service_curve *rsc,
1767 cl->cl_rsc == NULL) {
1774 cl->cl_fsc == NULL) {
1781 cl->cl_usc == NULL) {
1793 if (cl->cl_rsc != NULL) {
1794 if (!qempty(cl->cl_q))
1795 hfsc_purgeq(cl);
1796 free(cl->cl_rsc, M_DEVBUF);
1797 cl->cl_rsc = NULL;
1800 if (cl->cl_rsc == NULL)
1801 cl->cl_rsc = rsc_tmp;
1802 sc2isc(rsc, cl->cl_rsc);
1803 rtsc_init(&cl->cl_deadline, cl->cl_rsc, cur_time,
1804 cl->cl_cumul);
1805 cl->cl_eligible = cl->cl_deadline;
1806 if (cl->cl_rsc->sm1 <= cl->cl_rsc->sm2) {
1807 cl->cl_eligible.dx = 0;
1808 cl->cl_eligible.dy = 0;
1815 if (cl->cl_fsc != NULL) {
1816 if (!qempty(cl->cl_q))
1817 hfsc_purgeq(cl);
1818 free(cl->cl_fsc, M_DEVBUF);
1819 cl->cl_fsc = NULL;
1822 if (cl->cl_fsc == NULL)
1823 cl->cl_fsc = fsc_tmp;
1824 sc2isc(fsc, cl->cl_fsc);
1825 rtsc_init(&cl->cl_virtual, cl->cl_fsc, cl->cl_vt,
1826 cl->cl_total);
1832 if (cl->cl_usc != NULL) {
1833 free(cl->cl_usc, M_DEVBUF);
1834 cl->cl_usc = NULL;
1835 cl->cl_myf = 0;
1838 if (cl->cl_usc == NULL)
1839 cl->cl_usc = usc_tmp;
1840 sc2isc(usc, cl->cl_usc);
1841 rtsc_init(&cl->cl_ulimit, cl->cl_usc, cur_time,
1842 cl->cl_total);
1846 if (!qempty(cl->cl_q)) {
1847 if (cl->cl_rsc != NULL)
1848 update_ed(cl, m_pktlen(qhead(cl->cl_q)));
1849 if (cl->cl_fsc != NULL)
1850 update_vf(cl, 0, cur_time);
2045 struct hfsc_class *cl, *parent;
2064 if ((cl = hfsc_class_create(hif, &ap->service_curve, NULL, NULL,
2078 struct hfsc_class *cl;
2083 if ((cl = clh_to_clp(hif, ap->class_handle)) == NULL)
2086 return hfsc_class_destroy(cl);
2093 struct hfsc_class *cl;
2101 if ((cl = clh_to_clp(hif, ap->class_handle)) == NULL)
2111 return hfsc_class_modify(cl, rsc, fsc, usc);
2118 struct hfsc_class *cl;
2123 if ((cl = clh_to_clp(hif, ap->class_handle)) == NULL)
2126 if (is_a_parent_class(cl)) {
2134 cl, &ap->filter_handle);
2153 struct hfsc_class *cl;
2167 for (cl = hif->hif_rootclass, n = 0; cl != NULL && n < nclasses;
2168 cl = hfsc_nextclass(cl), n++)
2176 for (n = 0; cl != NULL && n < nclasses; cl = hfsc_nextclass(cl), n++) {
2178 get_class_stats(&stats, cl);