Lines Matching defs:cl (occurrences of the class pointer cl, a struct fairq_class *, across the FAIRQ packet scheduler)

262 struct fairq_class *cl;
269 if ((cl = fif->fif_classes[pri]) != NULL)
270 fairq_class_destroy(fif, cl);
279 struct fairq_class *cl;
285 if ((cl = fif->fif_classes[pri]) != NULL && cl->cl_head)
286 fairq_purgeq(fif, cl, 0, NULL, NULL);
302 struct fairq_class *cl;
308 if ((cl = fif->fif_classes[pri]) != NULL)
309 fairq_updateq(fif, cl, ev);
318 struct fairq_class *cl;
332 cl = fairq_class_create(fif, priority, qlimit, bandwidth,
334 if (cl == NULL)
338 *clp = cl;
351 struct fairq_class *cl;
412 if ((cl = fif->fif_classes[pri]) != NULL) {
414 if (cl->cl_head)
415 fairq_purgeq(fif, cl, 0, NULL, NULL);
417 if (cl->cl_qtype == Q_RIO)
418 rio_destroy(cl->cl_rio);
421 if (cl->cl_qtype == Q_RED)
422 red_destroy(cl->cl_red);
425 if (cl->cl_qtype == Q_BLUE)
426 blue_destroy(cl->cl_blue);
428 if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
429 sfb_destroy(cl->cl_sfb);
430 cl->cl_qalg.ptr = NULL;
431 cl->cl_qtype = Q_DROPTAIL;
432 cl->cl_qstate = QS_RUNNING;
434 cl = zalloc(fairq_cl_zone);
435 if (cl == NULL)
437 bzero(cl, fairq_cl_size);
438 cl->cl_nbuckets = nbuckets;
439 cl->cl_nbucket_mask = nbuckets - 1;
441 cl->cl_buckets = _MALLOC(sizeof (struct fairq_bucket) *
442 cl->cl_nbuckets, M_DEVBUF, M_WAITOK|M_ZERO);
443 if (cl->cl_buckets == NULL)
445 cl->cl_head = NULL;
448 fif->fif_classes[pri] = cl;
450 fif->fif_default = cl;
456 cl->cl_qlimit = qlimit;
457 for (i = 0; i < cl->cl_nbuckets; ++i) {
458 _qinit(&cl->cl_buckets[i].queue, Q_DROPTAIL, qlimit);
460 cl->cl_bandwidth = bandwidth / 8; /* cvt to bytes per second */
461 cl->cl_qtype = Q_DROPTAIL;
462 cl->cl_qstate = QS_RUNNING;
463 cl->cl_flags = flags;
464 cl->cl_pri = pri;
467 cl->cl_fif = fif;
468 cl->cl_handle = qid;
469 cl->cl_hogs_m1 = hogs_m1 / 8;
470 cl->cl_lssc_m1 = lssc_m1 / 8; /* NOT YET USED */
471 cl->cl_bw_current = 0;
479 cl->cl_qflags = 0;
482 cl->cl_qflags |= BLUEF_ECN;
484 cl->cl_qflags |= SFBF_ECN;
486 cl->cl_qflags |= REDF_ECN;
488 cl->cl_qflags |= RIOF_ECN;
492 cl->cl_qflags |= SFBF_FLOWCTL;
496 cl->cl_qflags |= RIOF_CLEARDSCP;
512 cl->cl_rio =
513 rio_alloc(ifp, 0, NULL, cl->cl_qflags, pkttime);
514 if (cl->cl_rio != NULL)
515 cl->cl_qtype = Q_RIO;
520 cl->cl_red = red_alloc(ifp, 0, 0,
521 cl->cl_qlimit * 10/100,
522 cl->cl_qlimit * 30/100,
523 cl->cl_qflags, pkttime);
524 if (cl->cl_red != NULL)
525 cl->cl_qtype = Q_RED;
531 cl->cl_blue = blue_alloc(ifp, 0, 0, cl->cl_qflags);
532 if (cl->cl_blue != NULL)
533 cl->cl_qtype = Q_BLUE;
537 if (!(cl->cl_flags & FARF_LAZY))
538 cl->cl_sfb = sfb_alloc(ifp, cl->cl_handle,
539 cl->cl_qlimit, cl->cl_qflags);
540 if (cl->cl_sfb != NULL || (cl->cl_flags & FARF_LAZY))
541 cl->cl_qtype = Q_SFB;
548 cl->cl_handle, cl->cl_pri, cl->cl_qlimit, flags, FARF_BITS);
551 return (cl);
554 if (cl->cl_buckets != NULL)
555 _FREE(cl->cl_buckets, M_DEVBUF);
557 if (cl != NULL) {
558 if (cl->cl_qalg.ptr != NULL) {
560 if (cl->cl_qtype == Q_RIO)
561 rio_destroy(cl->cl_rio);
564 if (cl->cl_qtype == Q_RED)
565 red_destroy(cl->cl_red);
568 if (cl->cl_qtype == Q_BLUE)
569 blue_destroy(cl->cl_blue);
571 if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
572 sfb_destroy(cl->cl_sfb);
573 cl->cl_qalg.ptr = NULL;
574 cl->cl_qtype = Q_DROPTAIL;
575 cl->cl_qstate = QS_RUNNING;
577 zfree(fairq_cl_zone, cl);
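
The matches from source lines 434 through 471 cover the body of fairq_class_create(): the class is zero-allocated, its bucket array is sized from nbuckets with cl_nbucket_mask set to nbuckets - 1 (which only works as a mask when nbuckets is a power of two), and the configured rates arrive in bits per second but are stored divided by 8 as bytes per second. The user-space sketch below models just that initialization; the struct names, the roundup_pow2() helper, and the choice to round nbuckets up (rather than validate it, as the kernel may do) are illustrative assumptions, not the kernel's types or logic.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct bucket {                     /* stand-in for struct fairq_bucket */
        unsigned qlen;
    };

    struct fclass {                     /* stand-in for struct fairq_class */
        struct bucket *buckets;
        unsigned       nbuckets;        /* power of two */
        unsigned       nbucket_mask;    /* nbuckets - 1 */
        uint64_t       bandwidth;       /* bytes per second */
        uint64_t       hogs_m1;         /* bytes per second */
    };

    /* Round up to the next power of two so (hash & mask) indexes a bucket. */
    static unsigned roundup_pow2(unsigned n)
    {
        unsigned p = 1;
        while (p < n)
            p <<= 1;
        return p;
    }

    static struct fclass *class_create(unsigned nbuckets, uint64_t bw_bps,
        uint64_t hogs_bps)
    {
        struct fclass *cl = calloc(1, sizeof(*cl));
        if (cl == NULL)
            return NULL;
        cl->nbuckets = roundup_pow2(nbuckets);
        cl->nbucket_mask = cl->nbuckets - 1;
        cl->buckets = calloc(cl->nbuckets, sizeof(*cl->buckets));
        if (cl->buckets == NULL) {
            free(cl);
            return NULL;
        }
        cl->bandwidth = bw_bps / 8;     /* convert bits/s to bytes/s */
        cl->hogs_m1 = hogs_bps / 8;
        return cl;
    }

    int main(void)
    {
        struct fclass *cl = class_create(6, 8000000, 1000000);
        if (cl != NULL) {
            printf("nbuckets=%u mask=0x%x bandwidth=%llu B/s\n",
                cl->nbuckets, cl->nbucket_mask,
                (unsigned long long)cl->bandwidth);
            free(cl->buckets);
            free(cl);
        }
        return 0;
    }
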
585 struct fairq_class *cl;
589 if ((cl = fairq_clh_to_clp(fif, qid)) == NULL)
592 return (fairq_class_destroy(fif, cl));
596 fairq_class_destroy(struct fairq_if *fif, struct fairq_class *cl)
603 if (cl->cl_head)
604 fairq_purgeq(fif, cl, 0, NULL, NULL);
606 fif->fif_classes[cl->cl_pri] = NULL;
607 if (fif->fif_poll_cache == cl)
609 if (fif->fif_maxpri == cl->cl_pri) {
610 for (pri = cl->cl_pri; pri >= 0; pri--)
619 if (cl->cl_qalg.ptr != NULL) {
621 if (cl->cl_qtype == Q_RIO)
622 rio_destroy(cl->cl_rio);
625 if (cl->cl_qtype == Q_RED)
626 red_destroy(cl->cl_red);
629 if (cl->cl_qtype == Q_BLUE)
630 blue_destroy(cl->cl_blue);
632 if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
633 sfb_destroy(cl->cl_sfb);
634 cl->cl_qalg.ptr = NULL;
635 cl->cl_qtype = Q_DROPTAIL;
636 cl->cl_qstate = QS_RUNNING;
639 if (fif->fif_default == cl)
645 cl->cl_handle, cl->cl_pri);
648 _FREE(cl->cl_buckets, M_DEVBUF);
649 cl->cl_head = NULL; /* sanity */
650 cl->cl_polled = NULL; /* sanity */
651 cl->cl_buckets = NULL; /* sanity */
653 zfree(fairq_cl_zone, cl);
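
Source lines 417-432, 560-575 and 619-636 repeat the same teardown pattern: whichever active queue management algorithm is attached through cl_qalg.ptr (RIO, RED, BLUE or SFB) is destroyed, the pointer is cleared, and the class falls back to Q_DROPTAIL. Below is a compressed stand-in for that pattern, with a hypothetical struct and a single free() in place of the rio_destroy()/red_destroy()/blue_destroy()/sfb_destroy() calls.

    #include <stdio.h>
    #include <stdlib.h>

    enum qtype { Q_DROPTAIL, Q_RED, Q_RIO, Q_BLUE, Q_SFB };

    struct klass {
        enum qtype qtype;
        void      *qalg;        /* stand-in for cl_qalg.ptr */
    };

    /* Free whichever AQM is attached, then fall back to plain drop-tail.
     * The kernel dispatches on cl_qtype to the matching *_destroy();
     * here one free() stands in for all of them. */
    static void reset_qalg(struct klass *cl)
    {
        if (cl->qalg != NULL) {
            free(cl->qalg);
            cl->qalg = NULL;
        }
        cl->qtype = Q_DROPTAIL;
    }

    int main(void)
    {
        struct klass cl = { Q_RED, malloc(16) };
        reset_qalg(&cl);
        printf("qtype=%d qalg=%p\n", cl.qtype, (void *)cl.qalg);
        return 0;
    }
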
659 fairq_enqueue(struct fairq_if *fif, struct fairq_class *cl, struct mbuf *m,
666 VERIFY(cl == NULL || cl->cl_fif == fif);
668 if (cl == NULL) {
669 cl = fairq_clh_to_clp(fif, t->pftag_qid);
670 if (cl == NULL) {
671 cl = fif->fif_default;
672 if (cl == NULL) {
680 cl->cl_flags |= FARF_HAS_PACKETS;
683 ret = fairq_addq(cl, m, t);
694 PKTCNTR_ADD(&cl->cl_dropcnt, 1, len);
723 struct fairq_class *cl;
755 if ((cl = fif->fif_classes[pri]) == NULL)
757 if ((cl->cl_flags & FARF_HAS_PACKETS) == 0)
759 m = fairq_pollq(cl, cur_time, &hit_limit);
761 cl->cl_flags &= ~FARF_HAS_PACKETS;
770 best_cl = cl;
781 scale = cl->cl_bw_current * 100 / cl->cl_bandwidth;
783 best_cl = cl;
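
Source lines 755-783 belong to the dequeue path's class scan: every class flagged FARF_HAS_PACKETS is polled, and line 781 computes scale, the class's current bandwidth as a percentage of its configured bandwidth, used to prefer the class consuming the smallest share of its allotment. A toy version of that comparison follows (an array of simplified classes; the hit_limit handling and priority ties that the real scan also weighs are left out).

    #include <stdint.h>
    #include <stdio.h>

    struct cls {
        int      has_packets;
        uint64_t bw_current;    /* measured bytes/s */
        uint64_t bandwidth;     /* configured bytes/s */
    };

    /* Pick the class using the smallest fraction of its configured rate. */
    static int pick_best(const struct cls *c, int n)
    {
        int best = -1;
        uint64_t best_scale = UINT64_MAX;

        for (int i = 0; i < n; i++) {
            if (!c[i].has_packets || c[i].bandwidth == 0)
                continue;
            uint64_t scale = c[i].bw_current * 100 / c[i].bandwidth;
            if (scale < best_scale) {
                best_scale = scale;
                best = i;
            }
        }
        return best;
    }

    int main(void)
    {
        struct cls c[3] = {
            { 1, 900, 1000 },   /* using 90% of its rate */
            { 1, 100, 1000 },   /* using 10% of its rate: should win */
            { 0,   0, 1000 },   /* no packets, skipped */
        };
        printf("best class = %d\n", pick_best(c, 3));
        return 0;
    }
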
808 fairq_addq(struct fairq_class *cl, struct mbuf *m, struct pf_mtag *t)
810 struct ifclassq *ifq = cl->cl_fif->fif_ifq;
823 if (cl->cl_head)
824 b = cl->cl_head->prev;
826 b = &cl->cl_buckets[0];
828 hindex = (hash & cl->cl_nbucket_mask);
829 b = &cl->cl_buckets[hindex];
841 if (cl->cl_head == NULL) {
842 cl->cl_head = b;
846 b->next = cl->cl_head;
847 b->prev = cl->cl_head->prev;
851 if (b->bw_delta && cl->cl_hogs_m1) {
853 if (bw < cl->cl_hogs_m1)
854 cl->cl_head = b;
860 if (cl->cl_qtype == Q_RIO)
861 return (rio_addq(cl->cl_rio, &b->queue, m, t));
865 if (cl->cl_qtype == Q_RED)
866 return (red_addq(cl->cl_red, &b->queue, m, t));
870 if (cl->cl_qtype == Q_BLUE)
871 return (blue_addq(cl->cl_blue, &b->queue, m, t));
874 if (cl->cl_qtype == Q_SFB) {
875 if (cl->cl_sfb == NULL) {
876 struct ifnet *ifp = FAIRQIF_IFP(cl->cl_fif);
878 VERIFY(cl->cl_flags & FARF_LAZY);
881 cl->cl_sfb = sfb_alloc(ifp, cl->cl_handle,
882 cl->cl_qlimit, cl->cl_qflags);
883 if (cl->cl_sfb == NULL) {
885 cl->cl_qtype = Q_DROPTAIL;
886 cl->cl_flags &= ~FARF_SFB;
887 cl->cl_qflags &= ~(SFBF_ECN | SFBF_FLOWCTL);
892 fairq_style(cl->cl_fif), cl->cl_handle,
893 cl->cl_pri);
896 if (cl->cl_sfb != NULL)
897 return (sfb_addq(cl->cl_sfb, &b->queue, m, t));
904 if (cl->cl_flags & FARF_CLEARDSCP)
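
Source lines 823-858 show how fairq_addq() picks a bucket: packets without a flow hash reuse the bucket just behind cl_head (or bucket 0), hashed packets land in cl_buckets[hash & cl_nbucket_mask], and a bucket that just became active is moved to the head of the rotation only if its measured rate is still below the cl_hogs_m1 threshold. The sketch below models only the index calculation and the promotion test; the bucket fields and the 1 MHz clock frequency are assumptions.

    #include <stdint.h>
    #include <stdio.h>

    #define NBUCKETS 8          /* must be a power of two for masking */

    struct bkt {
        uint64_t bw_bytes;      /* bytes seen in the current window */
        uint64_t bw_delta;      /* window length in clock ticks */
    };

    /* Map a flow hash to a bucket index the way lines 828-829 do. */
    static unsigned bucket_index(uint32_t flow_hash)
    {
        return flow_hash & (NBUCKETS - 1);
    }

    /* Lines 851-854: a newly active bucket is promoted to the head of the
     * round-robin only if its measured rate is below the hog threshold. */
    static int promote_to_head(const struct bkt *b, uint64_t hogs_m1,
        uint64_t clk_freq)
    {
        if (b->bw_delta == 0 || hogs_m1 == 0)
            return 0;
        uint64_t bw = b->bw_bytes * clk_freq / b->bw_delta;
        return bw < hogs_m1;
    }

    int main(void)
    {
        struct bkt quiet = { .bw_bytes = 1000, .bw_delta = 1000000 };
        printf("flow 0x1234 -> bucket %u\n", bucket_index(0x1234));
        printf("promote quiet bucket: %d\n",
            promote_to_head(&quiet, 125000, 1000000));
        return 0;
    }
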
913 fairq_getq(struct fairq_class *cl, u_int64_t cur_time)
918 IFCQ_LOCK_ASSERT_HELD(cl->cl_fif->fif_ifq);
920 b = fairq_selectq(cl, 0);
924 else if (cl->cl_qtype == Q_RIO)
925 m = rio_getq(cl->cl_rio, &b->queue);
928 else if (cl->cl_qtype == Q_RED)
929 m = red_getq(cl->cl_red, &b->queue);
932 else if (cl->cl_qtype == Q_BLUE)
933 m = blue_getq(cl->cl_blue, &b->queue);
935 else if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
936 m = sfb_getq(cl->cl_sfb, &b->queue);
949 delta = (cur_time - cl->cl_last_time);
952 cl->cl_bw_delta += delta;
953 cl->cl_bw_bytes += m->m_pkthdr.len;
954 cl->cl_last_time = cur_time;
955 if (cl->cl_bw_delta > machclk_freq) {
956 cl->cl_bw_delta -= cl->cl_bw_delta >> 2;
957 cl->cl_bw_bytes -= cl->cl_bw_bytes >> 2;
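
Source lines 949-957 are the per-dequeue bandwidth accounting: cl_bw_delta accumulates elapsed clock ticks, cl_bw_bytes accumulates dequeued bytes, and once the window exceeds machclk_freq ticks (roughly one second) both counters are decayed by a quarter, which keeps the bytes/ticks ratio behaving like a cheap exponentially weighted rate estimate. A self-contained sketch of the same arithmetic, with an assumed 1 MHz tick rate:

    #include <stdint.h>
    #include <stdio.h>

    struct bw_acct {
        uint64_t bw_bytes;      /* bytes dequeued in the current window */
        uint64_t bw_delta;      /* elapsed clock ticks in the window */
        uint64_t last_time;     /* timestamp of the previous dequeue */
    };

    /* Mirror lines 949-957: accumulate, then decay both counters by 25%
     * once the window grows past one second of clock ticks. */
    static void account_dequeue(struct bw_acct *a, uint64_t now,
        uint64_t pkt_len, uint64_t clk_freq)
    {
        uint64_t delta = now - a->last_time;

        a->bw_delta += delta;
        a->bw_bytes += pkt_len;
        a->last_time = now;
        if (a->bw_delta > clk_freq) {
            a->bw_delta -= a->bw_delta >> 2;
            a->bw_bytes -= a->bw_bytes >> 2;
        }
    }

    int main(void)
    {
        struct bw_acct a = { 0, 0, 0 };
        uint64_t clk_freq = 1000000;        /* assumed 1 MHz tick rate */

        /* Dequeue a 1500-byte packet every 1000 ticks for two seconds. */
        for (uint64_t t = 1000; t <= 2000000; t += 1000)
            account_dequeue(&a, t, 1500, clk_freq);
        printf("bytes=%llu ticks=%llu approx B/s=%llu\n",
            (unsigned long long)a.bw_bytes,
            (unsigned long long)a.bw_delta,
            a.bw_delta ?
            (unsigned long long)(a.bw_bytes * clk_freq / a.bw_delta) : 0ULL);
        return 0;
    }
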
983 fairq_pollq(struct fairq_class *cl, u_int64_t cur_time, int *hit_limit)
990 IFCQ_LOCK_ASSERT_HELD(cl->cl_fif->fif_ifq);
993 b = fairq_selectq(cl, 1);
1004 delta = cur_time - cl->cl_last_time;
1007 cl->cl_bw_delta += delta;
1008 cl->cl_last_time = cur_time;
1009 if (cl->cl_bw_delta) {
1010 bw = cl->cl_bw_bytes * machclk_freq / cl->cl_bw_delta;
1012 if (bw > cl->cl_bandwidth)
1014 cl->cl_bw_current = bw;
1017 bw, cl->cl_bandwidth, *hit_limit, b);
1028 fairq_selectq(struct fairq_class *cl, int ispoll)
1033 IFCQ_LOCK_ASSERT_HELD(cl->cl_fif->fif_ifq);
1035 if (ispoll == 0 && cl->cl_polled) {
1036 b = cl->cl_polled;
1037 cl->cl_polled = NULL;
1041 while ((b = cl->cl_head) != NULL) {
1047 cl->cl_head = b->next;
1048 if (cl->cl_head == b) {
1049 cl->cl_head = NULL;
1061 if (cl->cl_hogs_m1 == 0) {
1062 cl->cl_head = b->next;
1065 if (bw >= cl->cl_hogs_m1) {
1066 cl->cl_head = b->next;
1079 cl->cl_polled = b;
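
Source lines 1035-1079 are fairq_selectq(): a non-poll call first consumes the bucket cached in cl_polled by the previous poll; otherwise the circular bucket list is walked from cl_head, empty buckets are dropped, and buckets whose measured bandwidth has reached cl_hogs_m1 are rotated toward the back so lighter flows are served first. The sketch below replaces the circular list with a plain array and omits the cl_polled shortcut, so it only approximates the selection order.

    #include <stdint.h>
    #include <stdio.h>

    struct rbucket {
        int      qlen;          /* packets queued in this bucket */
        uint64_t bw;            /* measured bytes/s for the bucket */
    };

    /* Array-based version of the selection walk: starting at *headp, skip
     * empty buckets and rotate past buckets at or above the hog threshold,
     * returning the first non-hog, non-empty bucket. */
    static int select_bucket(struct rbucket *b, int n, int *headp,
        uint64_t hogs_m1)
    {
        for (int step = 0; step < n; step++) {
            int i = (*headp + step) % n;
            if (b[i].qlen == 0)
                continue;
            if (hogs_m1 != 0 && b[i].bw >= hogs_m1)
                continue;       /* hog: leave it for a later pass */
            *headp = (i + 1) % n;
            return i;
        }
        /* Only hogs (or nothing) left: take any non-empty bucket. */
        for (int i = 0; i < n; i++)
            if (b[i].qlen != 0)
                return i;
        return -1;
    }

    int main(void)
    {
        struct rbucket b[4] = {
            { 0, 0 }, { 3, 200000 }, { 2, 10000 }, { 0, 0 }
        };
        int head = 0;
        printf("selected bucket = %d\n", select_bucket(b, 4, &head, 125000));
        return 0;
    }
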
1084 fairq_purgeq(struct fairq_if *fif, struct fairq_class *cl, u_int32_t flow,
1096 while ((b = fairq_selectq(cl, 0)) != NULL) {
1103 if (cl->cl_qtype == Q_RIO)
1104 rio_purgeq(cl->cl_rio, &b->queue, flow, &cnt, &len);
1108 if (cl->cl_qtype == Q_RED)
1109 red_purgeq(cl->cl_red, &b->queue, flow, &cnt, &len);
1113 if (cl->cl_qtype == Q_BLUE)
1114 blue_purgeq(cl->cl_blue, &b->queue, flow, &cnt, &len);
1117 if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
1118 sfb_purgeq(cl->cl_sfb, &b->queue, flow, &cnt, &len);
1127 PKTCNTR_ADD(&cl->cl_dropcnt, cnt, len);
1140 cl->cl_handle, cl->cl_pri, qlen, qlen(&b->queue),
1152 fairq_updateq(struct fairq_if *fif, struct fairq_class *cl, cqev_t ev)
1159 cl->cl_handle, cl->cl_pri, ifclassq_ev2str(ev));
1163 if (cl->cl_qtype == Q_RIO)
1164 return (rio_updateq(cl->cl_rio, ev));
1167 if (cl->cl_qtype == Q_RED)
1168 return (red_updateq(cl->cl_red, ev));
1171 if (cl->cl_qtype == Q_BLUE)
1172 return (blue_updateq(cl->cl_blue, ev));
1174 if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
1175 return (sfb_updateq(cl->cl_sfb, ev));
1182 struct fairq_class *cl;
1187 if ((cl = fairq_clh_to_clp(fif, qid)) == NULL)
1190 sp->class_handle = cl->cl_handle;
1191 sp->priority = cl->cl_pri;
1192 sp->qlimit = cl->cl_qlimit;
1193 sp->xmit_cnt = cl->cl_xmitcnt;
1194 sp->drop_cnt = cl->cl_dropcnt;
1195 sp->qtype = cl->cl_qtype;
1196 sp->qstate = cl->cl_qstate;
1199 if (cl->cl_head) {
1200 b = cl->cl_head;
1204 } while (b != cl->cl_head);
1208 if (cl->cl_qtype == Q_RED)
1209 red_getstats(cl->cl_red, &sp->red[0]);
1212 if (cl->cl_qtype == Q_RIO)
1213 rio_getstats(cl->cl_rio, &sp->red[0]);
1216 if (cl->cl_qtype == Q_BLUE)
1217 blue_getstats(cl->cl_blue, &sp->blue);
1219 if (cl->cl_qtype == Q_SFB && cl->cl_sfb != NULL)
1220 sfb_getstats(cl->cl_sfb, &sp->sfb);
1229 struct fairq_class *cl;
1235 if ((cl = fif->fif_classes[idx]) != NULL &&
1236 cl->cl_handle == chandle)
1237 return (cl);
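
Source lines 1235-1237 show fairq_clh_to_clp(): a linear scan of the per-priority class slots comparing each cl_handle with the requested queue id. A stand-alone equivalent, with an assumed slot count and simplified class type:

    #include <stdint.h>
    #include <stdio.h>

    #define MAXPRI 8

    struct klass {
        uint32_t handle;        /* queue id assigned at creation */
    };

    /* Linear scan over the per-priority slots, as at lines 1235-1237. */
    static struct klass *clh_to_clp(struct klass *slots[MAXPRI],
        uint32_t chandle)
    {
        for (int idx = 0; idx < MAXPRI; idx++) {
            if (slots[idx] != NULL && slots[idx]->handle == chandle)
                return slots[idx];
        }
        return NULL;
    }

    int main(void)
    {
        struct klass a = { 0x10 }, b = { 0x20 };
        struct klass *slots[MAXPRI] = { &a, NULL, &b };
        printf("found: %s\n", clh_to_clp(slots, 0x20) ? "yes" : "no");
        return 0;
    }
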
1269 ifq->ifcq_disc_slots[i].cl = NULL;