Lines matching references to the symbol 'ifd' (struct rm_ifdat).  The number at the start of each entry is the line number in the source file.

101 #define	reset_cutoff(ifd)	{ ifd->cutoff_ = RM_MAXDEPTH; }
180 * 'pri' on the interface given by 'ifd'.
237 rmc_newclass(int pri, struct rm_ifdat *ifd, u_int32_t nsecPerByte,
304 cl->ifdat_ = ifd;
311 ifq = ifd->ifq_;
404 if ((peer = ifd->active_[pri]) != NULL) {
407 while (peer->peer_ != ifd->active_[pri])
411 ifd->active_[pri] = cl;
430 if (ifd->wrr_) {
431 ifd->num_[pri]++;
432 ifd->alloc_[pri] += cl->allotment_;
433 rmc_wrr_set_weights(ifd);
443 struct rm_ifdat *ifd;
446 ifd = cl->ifdat_;
471 if (ifd->wrr_) {
472 ifd->alloc_[cl->pri_] += cl->allotment_ - old_allotment;
473 rmc_wrr_set_weights(ifd);
488 rmc_wrr_set_weights(struct rm_ifdat *ifd)
498 if (ifd->num_[i] == 0) {
499 ifd->M_[i] = 0;
501 ifd->M_[i] =
502 ifd->alloc_[i] / (ifd->num_[i] * ifd->maxpkt_);
511 if (ifd->active_[i] != NULL) {
512 clh = cl = ifd->active_[i];
515 if (ifd->M_[i] == 0) {
519 cl->allotment_ / ifd->M_[i];
528 rmc_get_weight(struct rm_ifdat *ifd, int pri)
531 return (ifd->M_[pri]);
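
The rmc_wrr_set_weights() fragments above (source lines 488-531) compute one
scaling factor M_[i] per priority, the average per-class allotment at that
priority expressed in MTU-sized units, and then give every class in that
priority's ring the weight w_allotment_ = allotment_ / M_[i].  The following
is a minimal, self-contained sketch of that calculation; the toy_* types and
TOY_MAXPRIO are illustrative stand-ins for struct rm_ifdat, struct rm_class,
and RM_MAXPRIO, not the real definitions.

    #define TOY_MAXPRIO 8                   /* stands in for RM_MAXPRIO */

    struct toy_class {
        int allotment_;                     /* this class's bandwidth allotment */
        int w_allotment_;                   /* precomputed weight for the WRR scan */
        struct toy_class *peer_;            /* circular ring of same-priority classes */
    };

    struct toy_ifdat {
        int alloc_[TOY_MAXPRIO];            /* sum of allotments at each priority */
        int num_[TOY_MAXPRIO];              /* number of classes at each priority */
        int maxpkt_;                        /* interface MTU */
        int M_[TOY_MAXPRIO];                /* per-priority scaling factor */
        struct toy_class *active_[TOY_MAXPRIO];
    };

    static void
    toy_wrr_set_weights(struct toy_ifdat *ifd)
    {
        for (int i = 0; i < TOY_MAXPRIO; i++) {
            /* average per-class allotment at this priority, in MTU units */
            if (ifd->num_[i] == 0)
                ifd->M_[i] = 0;
            else
                ifd->M_[i] = ifd->alloc_[i] / (ifd->num_[i] * ifd->maxpkt_);

            /* weight each class by its allotment; the M_[i] == 0 guard
             * mirrors the check at source line 515 */
            struct toy_class *cl, *clh;
            if ((cl = clh = ifd->active_[i]) != NULL) {
                do {
                    cl->w_allotment_ = (ifd->M_[i] == 0) ?
                        0 : cl->allotment_ / ifd->M_[i];
                    cl = cl->peer_;
                } while (cl != NULL && cl != clh);
            }
        }
    }

The three call sites visible in the listing (source lines 433, 473, and 678)
all sit on class add/modify/delete paths, so the divisions stay out of the
per-packet dequeue path.
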
609 rmc_delete_class(struct rm_ifdat *ifd, struct rm_class *cl)
654 if ((p = ifd->active_[cl->pri_]) != NULL) {
664 if (ifd->active_[cl->pri_] == cl)
665 ifd->active_[cl->pri_] = cl->peer_;
668 ifd->active_[cl->pri_] = NULL;
675 if (ifd->wrr_) {
676 ifd->alloc_[cl->pri_] -= cl->allotment_;
677 ifd->num_[cl->pri_]--;
678 rmc_wrr_set_weights(ifd);
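
The rmc_newclass() fragments (source lines 404-433) and the rmc_delete_class()
fragments (source lines 654-678) operate on the same structure: a circular,
singly linked ring of same-priority classes threaded through peer_, with
active_[pri] pointing at one member.  A sketch of that insert/remove
discipline with minimal stand-in types; the locking, error handling, and WRR
accounting of the real functions are omitted.

    #define TOY_MAXPRIO 8                   /* stands in for RM_MAXPRIO */

    struct toy_class {
        struct toy_class *peer_;            /* next class in the same-priority ring */
    };

    struct toy_ifdat {
        struct toy_class *active_[TOY_MAXPRIO];
    };

    /* Insert: walk to the last member of the ring and splice cl in behind
     * it, as in the fragments at source lines 404-411. */
    static void
    toy_ring_insert(struct toy_ifdat *ifd, struct toy_class *cl, int pri)
    {
        struct toy_class *peer;

        if ((peer = ifd->active_[pri]) != NULL) {
            cl->peer_ = peer;               /* cl now precedes the head */
            while (peer->peer_ != ifd->active_[pri])
                peer = peer->peer_;         /* find the last member */
            peer->peer_ = cl;               /* close the ring through cl */
        } else {
            ifd->active_[pri] = cl;         /* first class at this priority */
            cl->peer_ = cl;                 /* ring of one */
        }
    }

    /* Remove: unlink cl from its predecessor and repoint (or clear) the
     * active head, as in the fragments at source lines 654-668. */
    static void
    toy_ring_remove(struct toy_ifdat *ifd, struct toy_class *cl, int pri)
    {
        struct toy_class *p;

        if ((p = ifd->active_[pri]) == NULL)
            return;
        if (cl->peer_ != cl) {
            while (p->peer_ != cl)          /* find cl's predecessor */
                p = p->peer_;
            p->peer_ = cl->peer_;
            if (ifd->active_[pri] == cl)    /* source lines 664-665 */
                ifd->active_[pri] = cl->peer_;
        } else {
            ifd->active_[pri] = NULL;       /* source line 668: last class */
        }
    }

The num_[pri], alloc_[pri], and rmc_wrr_set_weights() updates that accompany
both operations in the listing are what keep the weights above in step with
the ring contents.
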
715 * associated with the output portion of interface 'ifp'. 'ifd' is
729 rmc_init(struct ifclassq *ifq, struct rm_ifdat *ifd, u_int32_t nsecPerByte,
753 bzero((char *)ifd, sizeof (*ifd));
755 ifd->ifq_ = ifq;
756 ifd->restart = restart;
757 ifd->maxqueued_ = maxqueued;
758 ifd->ns_per_byte_ = nsecPerByte;
759 ifd->maxpkt_ = mtu;
760 ifd->wrr_ = (flags & RMCF_WRR) ? 1 : 0;
761 ifd->efficient_ = (flags & RMCF_EFFICIENT) ? 1 : 0;
763 ifd->maxiftime_ = mtu * nsecPerByte / 1000 * 16;
765 ifd->maxiftime_ /= 4;
768 reset_cutoff(ifd);
769 CBQTRACE(rmc_init, 'INIT', ifd->cutoff_);
775 ifd->alloc_[i] = 0;
776 ifd->M_[i] = 0;
777 ifd->num_[i] = 0;
778 ifd->na_[i] = 0;
779 ifd->active_[i] = NULL;
785 ifd->qi_ = 0;
786 ifd->qo_ = 0;
788 ifd->class_[i] = NULL;
789 ifd->curlen_[i] = 0;
790 ifd->borrowed_[i] = NULL;
796 if ((ifd->root_ = rmc_newclass(0, ifd, nsecPerByte,
802 ifd->root_->depth_ = 0;
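
In the rmc_init() fragments (source lines 729-802), maxiftime_ caps how far
the interface clock ifnow_ may run ahead of the recorded dequeue timestamps
(its use is visible at source lines 1370-1374): nsecPerByte * mtu is one
MTU-sized packet time in nanoseconds, the division by 1000 converts that to
microseconds (the same conversion as the pkt_time computation at source line
1358), and the factor of 16 allows roughly sixteen packet times of lead,
quartered in efficient mode.  A worked example with hypothetical link
parameters (1500-byte MTU, 800 ns per byte, i.e. 10 Mbit/s), chosen purely
for illustration:

    #include <stdio.h>

    int
    main(void)
    {
        /* hypothetical parameters, not taken from the source */
        unsigned int mtu = 1500;            /* bytes */
        unsigned int nsecPerByte = 800;     /* 10 Mbit/s is 800 ns per byte */
        int efficient = 0;

        /* mirrors source line 763: mtu * nsecPerByte / 1000 * 16 */
        unsigned int maxiftime = mtu * nsecPerByte / 1000 * 16;
        if (efficient)
            maxiftime /= 4;                 /* mirrors source line 765 */

        /* 1500 * 800 ns = 1.2 ms per packet, so 16 packets = 19200 us */
        printf("maxiftime_ = %u us\n", maxiftime);
        return 0;
    }
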
822 struct rm_ifdat *ifd = cl->ifdat_;
828 if (ifd->cutoff_ > 0) {
830 if (ifd->cutoff_ > cl->depth_)
831 ifd->cutoff_ = cl->depth_;
842 borrow->depth_ < ifd->cutoff_) {
844 ifd->cutoff_ = borrow->depth_;
846 ifd->cutoff_);
864 ifd->na_[cpri]++;
877 * rmc_tl_satisfied(struct rm_ifdat *ifd, struct timeval *now) - Check all
882 rmc_tl_satisfied(struct rm_ifdat *ifd, struct timeval *now)
888 if ((bp = ifd->active_[i]) != NULL) {
892 ifd->cutoff_ = p->depth_;
900 reset_cutoff(ifd);
945 struct rm_ifdat *ifd = cl->ifdat_;
947 ifd->borrowed_[ifd->qi_] = NULL;
968 (cl->depth_ > ifd->cutoff_)) {
990 CBQTRACE(rmc_under_limit, 'ffou', ifd->cutoff_);
1006 ifd->borrowed_[ifd->qi_] = cl;
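
Many of the matches above and below are the cutoff_ field, which bounds the
depth_ of the ancestor a class is currently allowed to borrow from (the
comparison at source line 968).  The enqueue-side fragments (source lines
828-846) lower it to the depth of a class seen to be under its limit, the
dequeue fragments further down raise it one level at a time when nothing can
send (source lines 1112-1124 and 1231-1242), the completion path restores it
once a borrower catches up (source lines 1458-1464), and reset_cutoff() at
source line 101 lifts the restriction entirely.  A hedged sketch of those
manipulations; the names and the RM_MAXDEPTH stand-in are illustrative.

    #define TOY_MAXDEPTH 32                 /* stands in for RM_MAXDEPTH */

    struct toy_ifdat {
        int cutoff_;                        /* deepest depth_ borrowing may use */
        int root_depth_;                    /* stands in for ifd->root_->depth_ */
    };

    /* Enqueue side (source lines 830-844): an underlimit class or underlimit
     * ancestor at this depth was seen, so clamp borrowing at that level. */
    static void
    toy_cutoff_tighten(struct toy_ifdat *ifd, int underlimit_depth)
    {
        if (underlimit_depth < ifd->cutoff_)
            ifd->cutoff_ = underlimit_depth;
    }

    /* Dequeue side (source lines 1112-1113 and 1231-1232): nothing could
     * send but some class was merely over limit, so permit borrowing one
     * level deeper; in the full function a rescan follows. */
    static int
    toy_cutoff_escalate(struct toy_ifdat *ifd)
    {
        if (ifd->cutoff_ < ifd->root_depth_) {
            ifd->cutoff_++;
            return 1;                       /* caller rescans */
        }
        return 0;
    }

    /* reset_cutoff() (source line 101): allow borrowing from any depth. */
    static void
    toy_cutoff_reset(struct toy_ifdat *ifd)
    {
        ifd->cutoff_ = TOY_MAXDEPTH;
    }
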
1028 _rmc_wrr_dequeue_next(struct rm_ifdat *ifd, cqdq_op_t op)
1042 if (op == CLASSQDQ_REMOVE && ifd->pollcache_) {
1043 cl = ifd->pollcache_;
1045 if (ifd->efficient_) {
1051 ifd->pollcache_ = NULL;
1055 ifd->pollcache_ = NULL;
1056 ifd->borrowed_[ifd->qi_] = NULL;
1062 if (ifd->na_[cpri] == 0)
1075 cl = ifd->active_[cpri];
1089 ifd->borrowed_[ifd->qi_] = NULL;
1097 } while (cl != ifd->active_[cpri]);
1112 if (first != NULL && ifd->cutoff_ < ifd->root_->depth_) {
1113 ifd->cutoff_++;
1114 CBQTRACE(_rmc_wrr_dequeue_next, 'ojda', ifd->cutoff_);
1123 reset_cutoff(ifd);
1124 CBQTRACE(_rmc_wrr_dequeue_next, 'otsr', ifd->cutoff_);
1126 if (!ifd->efficient_ || first == NULL)
1137 ifd->borrowed_[ifd->qi_] = cl->borrow_;
1138 ifd->cutoff_ = cl->borrow_->depth_;
1150 ifd->na_[cpri]--;
1159 ifd->active_[cl->pri_] = cl->peer_;
1161 ifd->active_[cl->pri_] = cl;
1163 ifd->class_[ifd->qi_] = cl;
1164 ifd->curlen_[ifd->qi_] = m_pktlen(m);
1165 ifd->now_[ifd->qi_] = now;
1166 ifd->qi_ = (ifd->qi_ + 1) % ifd->maxqueued_;
1167 ifd->queued_++;
1171 ifd->pollcache_ = cl;
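
The head of _rmc_wrr_dequeue_next() (source lines 1042-1056) and of
_rmc_prr_dequeue_next() further down (source lines 1196-1204) share the
pollcache_ convention: a poll operation records the class it selected
(source lines 1171 and 1278) so that the remove operation which follows can
dequeue from that class without repeating the scheduling scan.  A
self-contained sketch of just that convention; the queue representation, the
scan, and all toy_* names are illustrative, and the efficient_-mode special
case at source line 1045 is left out.

    #include <stddef.h>

    enum toy_op { TOY_POLL, TOY_REMOVE };   /* stand-ins for cqdq_op_t values */

    struct toy_class {
        int q_[8];                          /* lengths of packets queued here */
        int qlen_;
    };

    struct toy_ifdat {
        struct toy_class *active_;          /* pretend result of the WRR scan */
        struct toy_class *pollcache_;       /* class chosen by the last POLL */
    };

    static struct toy_class *
    toy_scan(struct toy_ifdat *ifd)
    {
        /* stands in for the full weighted round-robin pass */
        return (ifd->active_ != NULL && ifd->active_->qlen_ > 0) ?
            ifd->active_ : NULL;
    }

    static int                              /* packet length, or -1 if empty */
    toy_dequeue_next(struct toy_ifdat *ifd, enum toy_op op)
    {
        struct toy_class *cl;

        if (op == TOY_REMOVE && ifd->pollcache_ != NULL) {
            cl = ifd->pollcache_;           /* reuse the POLL's decision */
            ifd->pollcache_ = NULL;
        } else {
            ifd->pollcache_ = NULL;
            if ((cl = toy_scan(ifd)) == NULL)
                return -1;
            if (op == TOY_POLL) {
                ifd->pollcache_ = cl;       /* remember for the matching REMOVE */
                return cl->q_[0];           /* peek only */
            }
        }
        /* REMOVE: pop the head packet */
        int len = cl->q_[0];
        for (int i = 1; i < cl->qlen_; i++)
            cl->q_[i - 1] = cl->q_[i];
        cl->qlen_--;
        return len;
    }
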
1183 _rmc_prr_dequeue_next(struct rm_ifdat *ifd, cqdq_op_t op)
1196 if (op == CLASSQDQ_REMOVE && ifd->pollcache_) {
1197 cl = ifd->pollcache_;
1199 ifd->pollcache_ = NULL;
1203 ifd->pollcache_ = NULL;
1204 ifd->borrowed_[ifd->qi_] = NULL;
1210 if (ifd->na_[cpri] == 0)
1212 cl = ifd->active_[cpri];
1223 } while (cl != ifd->active_[cpri]);
1231 if (first != NULL && ifd->cutoff_ < ifd->root_->depth_) {
1232 ifd->cutoff_++;
1241 reset_cutoff(ifd);
1242 if (!ifd->efficient_ || first == NULL)
1253 ifd->borrowed_[ifd->qi_] = cl->borrow_;
1254 ifd->cutoff_ = cl->borrow_->depth_;
1266 ifd->na_[cpri]--;
1268 ifd->active_[cpri] = cl->peer_;
1270 ifd->class_[ifd->qi_] = cl;
1271 ifd->curlen_[ifd->qi_] = m_pktlen(m);
1272 ifd->now_[ifd->qi_] = now;
1273 ifd->qi_ = (ifd->qi_ + 1) % ifd->maxqueued_;
1274 ifd->queued_++;
1278 ifd->pollcache_ = cl;
1285 * rmc_dequeue_next(struct rm_ifdat *ifd, struct timeval *now) - this function
1298 rmc_dequeue_next(struct rm_ifdat *ifd, cqdq_op_t mode)
1300 if (ifd->queued_ >= ifd->maxqueued_)
1302 else if (ifd->wrr_)
1303 return (_rmc_wrr_dequeue_next(ifd, mode));
1305 return (_rmc_prr_dequeue_next(ifd, mode));
1322 rmc_update_class_util(struct rm_ifdat *ifd)
1333 if ((cl = ifd->class_[ifd->qo_]) == NULL)
1336 pktlen = ifd->curlen_[ifd->qo_];
1337 borrowed = ifd->borrowed_[ifd->qo_];
1350 * as a result, ifd->qi_ and ifd->qo_ are always synced.
1352 nowp = &ifd->now_[ifd->qo_];
1355 pkt_time = ifd->curlen_[ifd->qo_] * ifd->ns_per_byte_;
1358 pkt_time = ifd->curlen_[ifd->qo_] * ifd->ns_per_byte_ / 1000;
1361 if (TV_LT(nowp, &ifd->ifnow_)) {
1370 TV_DELTA(&ifd->ifnow_, nowp, iftime);
1371 if (iftime+pkt_time < ifd->maxiftime_) {
1372 TV_ADD_DELTA(&ifd->ifnow_, pkt_time, &ifd->ifnow_);
1374 TV_ADD_DELTA(nowp, ifd->maxiftime_, &ifd->ifnow_);
1377 TV_ADD_DELTA(nowp, pkt_time, &ifd->ifnow_);
1380 if (TV_LT(nowp, &ifd->ifnow_)) {
1381 TV_ADD_DELTA(&ifd->ifnow_, pkt_time, &ifd->ifnow_);
1383 TV_ADD_DELTA(nowp, pkt_time, &ifd->ifnow_);
1388 TV_DELTA(&ifd->ifnow_, &cl->last_, idle);
1441 cl->last_ = ifd->ifnow_;
1457 cl = ifd->class_[ifd->qo_];
1458 if (borrowed && (ifd->cutoff_ >= borrowed->depth_)) {
1461 rmc_tl_satisfied(ifd, nowp);
1462 CBQTRACE(rmc_update_class_util, 'broe', ifd->cutoff_);
1464 ifd->cutoff_ = borrowed->depth_;
1473 ifd->borrowed_[ifd->qo_] = NULL;
1474 ifd->class_[ifd->qo_] = NULL;
1475 ifd->qo_ = (ifd->qo_ + 1) % ifd->maxqueued_;
1476 ifd->queued_--;
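
Both dequeue variants and rmc_update_class_util() cooperate through a small
ring of in-flight transmission slots: dequeue records the class, the packet
length, and a timestamp at index qi_ (source lines 1163-1167 and 1270-1274),
and the completion path consumes the slot at qo_, charges the transmission
time, and retires it (source lines 1333-1337, 1355-1358, and 1473-1476).  A
sketch of that producer/consumer pairing with stand-in types; the ifnow_
advancement, idle-time, and borrowing accounting of the real completion path
are left out.

    #include <stddef.h>
    #include <sys/time.h>

    #define TOY_MAXQUEUED 4                 /* stands in for ifd->maxqueued_ */

    struct toy_class;                       /* opaque here */

    struct toy_ifdat {
        struct toy_class *class_[TOY_MAXQUEUED];   /* class that sent slot i */
        int              curlen_[TOY_MAXQUEUED];   /* length of that packet */
        struct timeval   now_[TOY_MAXQUEUED];      /* when it was handed out */
        int              qi_;               /* producer index (dequeue) */
        int              qo_;               /* consumer index (completion) */
        int              queued_;           /* packets outstanding */
        int              maxqueued_;
        unsigned int     ns_per_byte_;
    };

    /* Producer side, mirroring source lines 1163-1167. */
    static void
    toy_record_xmit(struct toy_ifdat *ifd, struct toy_class *cl, int pktlen,
        struct timeval now)
    {
        ifd->class_[ifd->qi_] = cl;
        ifd->curlen_[ifd->qi_] = pktlen;
        ifd->now_[ifd->qi_] = now;
        ifd->qi_ = (ifd->qi_ + 1) % ifd->maxqueued_;
        ifd->queued_++;
    }

    /* Consumer side, mirroring source lines 1333-1337 and 1473-1476. */
    static unsigned int                     /* packet time in microseconds */
    toy_retire_xmit(struct toy_ifdat *ifd)
    {
        if (ifd->class_[ifd->qo_] == NULL)
            return 0;                       /* nothing outstanding */

        /* source line 1358: transmission time = length * ns/byte / 1000 */
        unsigned int pkt_time =
            ifd->curlen_[ifd->qo_] * ifd->ns_per_byte_ / 1000;

        ifd->class_[ifd->qo_] = NULL;
        ifd->qo_ = (ifd->qo_ + 1) % ifd->maxqueued_;
        ifd->queued_--;
        return pkt_time;
    }

Because both sides advance their index with the same modulus and queued_
never exceeds maxqueued_ (source line 1300), qi_ and qo_ stay in step, as the
comment fragment at source line 1350 notes.
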
1492 struct rm_ifdat *ifd = cl->ifdat_;
1495 IFCQ_CONVERT_LOCK(ifd->ifq_);
1498 ifd->na_[cl->pri_]--;
1505 struct rm_ifdat *ifd = cl->ifdat_;
1506 struct ifclassq *ifq = ifd->ifq_;
1541 ifd->na_[cl->pri_]--;
1645 struct rm_ifdat *ifd = cl->ifdat_;
1651 if (ifd->queued_ < ifd->maxqueued_ && ifd->restart != NULL) {
1653 (ifd->restart)(ifd->ifq_);
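
The closing fragments (source lines 1645-1653) fire the restart hook that
rmc_init() stored at source line 756: when fewer than maxqueued_ packets are
outstanding, the scheduler calls back into the interface queue layer so that
transmission can resume.  A small sketch of that callback wiring; the queue
type and the hook body are assumptions, not the real ifclassq interface.

    #include <stddef.h>

    struct toy_ifclassq { int kicked; };    /* stands in for struct ifclassq */

    struct toy_ifdat {
        struct toy_ifclassq *ifq_;
        void (*restart)(struct toy_ifclassq *);    /* saved by rmc_init() */
        int queued_;
        int maxqueued_;
    };

    /* What a caller-supplied restart hook might do: mark the interface
     * queue so it resumes transmitting.  Purely illustrative. */
    static void
    toy_restart(struct toy_ifclassq *ifq)
    {
        ifq->kicked = 1;
    }

    /* Mirrors source lines 1651-1653: only call back when the scheduler
     * has room for another outstanding packet and a hook was registered. */
    static void
    toy_maybe_restart(struct toy_ifdat *ifd)
    {
        if (ifd->queued_ < ifd->maxqueued_ && ifd->restart != NULL)
            (*ifd->restart)(ifd->ifq_);
    }
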