Lines Matching defs:bat_priv

131  * @bat_priv: the bat priv with all the soft interface information
139 batadv_iv_ogm_orig_get(struct batadv_priv *bat_priv, const u8 *addr)
144 orig_node = batadv_orig_hash_find(bat_priv, addr);
148 orig_node = batadv_orig_node_new(bat_priv, addr);
155 hash_added = batadv_hash_add(bat_priv->orig_hash, batadv_compare_orig,
277 batadv_iv_ogm_emit_send_time(const struct batadv_priv *bat_priv)
281 msecs = atomic_read(&bat_priv->orig_interval) - BATADV_JITTER;
294 static u8 batadv_hop_penalty(u8 tq, const struct batadv_priv *bat_priv)
296 int hop_penalty = atomic_read(&bat_priv->hop_penalty);
335 struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);
368 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
389 batadv_inc_counter(bat_priv, BATADV_CNT_MGMT_TX);
390 batadv_add_counter(bat_priv, BATADV_CNT_MGMT_TX_BYTES,
427 * @bat_priv: the bat priv with all the soft interface information
439 struct batadv_priv *bat_priv,
482 primary_if = batadv_primary_if_get_selected(bat_priv);
543 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
548 atomic_t *queue_left = own_packet ? NULL : &bat_priv->batman_queue_left;
550 if (atomic_read(&bat_priv->aggregated_ogms) &&
563 queue_left, bat_priv, skb);
587 batadv_forw_packet_ogmv1_queue(bat_priv, forw_packet_aggr, send_time);
610 * @bat_priv: the bat priv with all the soft interface information
618 static void batadv_iv_ogm_queue_add(struct batadv_priv *bat_priv,
639 spin_lock_bh(&bat_priv->forw_bat_list_lock);
641 if (atomic_read(&bat_priv->aggregated_ogms) && !own_packet) {
643 &bat_priv->forw_bat_list, list) {
645 bat_priv, packet_len,
661 spin_unlock_bh(&bat_priv->forw_bat_list_lock);
667 if (!own_packet && atomic_read(&bat_priv->aggregated_ogms))
677 spin_unlock_bh(&bat_priv->forw_bat_list_lock);
689 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
693 batadv_dbg(BATADV_DBG_BATMAN, bat_priv, "ttl exceeded\n");
717 bat_priv);
719 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
728 batadv_iv_ogm_queue_add(bat_priv, (unsigned char *)batadv_ogm_packet,
742 struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);
743 struct batadv_hashtable *hash = bat_priv->orig_hash;
764 batadv_bit_get_packet(bat_priv, word, 1, 0);
781 struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);
805 primary_if = batadv_primary_if_get_selected(bat_priv);
811 batadv_tt_local_commit_changes(bat_priv);
812 tvlv_len = batadv_tvlv_container_ogm_append(bat_priv, ogm_buff,
827 send_time = batadv_iv_ogm_emit_send_time(bat_priv);
833 batadv_iv_ogm_queue_add(bat_priv, *ogm_buff, *ogm_buff_len,
849 batadv_iv_ogm_queue_add(bat_priv, *ogm_buff,
904 * @bat_priv: the bat priv with all the soft interface information
914 batadv_iv_ogm_orig_update(struct batadv_priv *bat_priv,
932 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
972 orig_tmp = batadv_iv_ogm_orig_get(bat_priv, ethhdr->h_source);
984 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1040 batadv_update_route(bat_priv, orig_node, if_outgoing, neigh_node);
1068 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
1164 bat_priv);
1175 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1210 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
1225 orig_node = batadv_iv_ogm_orig_get(bat_priv, batadv_ogm_packet->orig);
1240 batadv_window_protected(bat_priv, seq_diff,
1272 need_update |= batadv_bit_get_packet(bat_priv, bitmap,
1283 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1312 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
1346 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1353 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1382 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1389 batadv_tvlv_ogm_receive(bat_priv, ogm_packet, orig_node);
1397 orig_neigh_node = batadv_iv_ogm_orig_get(bat_priv,
1404 batadv_nc_update_nc_node(bat_priv, orig_node, orig_neigh_node,
1414 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1435 batadv_iv_ogm_orig_update(bat_priv, orig_node,
1453 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1463 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1470 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1476 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1481 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1552 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
1589 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1620 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1627 orig_neigh_node = batadv_iv_ogm_orig_get(bat_priv,
1635 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1642 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1649 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
1655 orig_node = batadv_iv_ogm_orig_get(bat_priv, ogm_packet->orig);
1667 if (hard_iface->soft_iface != bat_priv->soft_iface)
1687 struct batadv_priv *bat_priv;
1693 bat_priv = netdev_priv(forw_packet->if_incoming->soft_iface);
1695 if (atomic_read(&bat_priv->mesh_state) == BATADV_MESH_DEACTIVATING) {
1717 &bat_priv->forw_bat_list_lock))
1724 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
1738 if (bat_priv->algo_ops->iface.enable != batadv_iv_ogm_iface_enable)
1741 batadv_inc_counter(bat_priv, BATADV_CNT_MGMT_RX);
1742 batadv_add_counter(bat_priv, BATADV_CNT_MGMT_RX_BYTES,
1803 * @bat_priv: The bat priv with all the soft interface information
1813 struct batadv_priv *bat_priv,
1866 * @bat_priv: The bat priv with all the soft interface information
1877 struct batadv_priv *bat_priv,
1905 bat_priv, if_outgoing,
1928 * @bat_priv: The bat priv with all the soft interface information
1938 struct batadv_priv *bat_priv,
1950 if (batadv_iv_ogm_orig_dump_entry(msg, portid, seq, bat_priv,
1969 * @bat_priv: The bat priv with all the soft interface information
1974 struct batadv_priv *bat_priv,
1977 struct batadv_hashtable *hash = bat_priv->orig_hash;
1989 bat_priv, if_outgoing, head,
2091 * @bat_priv: The bat priv with all the soft interface information
2101 struct batadv_priv *bat_priv,
2128 * @bat_priv: The bat priv with all the soft interface information
2133 struct batadv_priv *bat_priv,
2147 bat_priv,
2155 if (hard_iface->soft_iface != bat_priv->soft_iface)
2163 bat_priv,
2239 * @bat_priv: the bat priv with all the soft interface information
2241 static void batadv_iv_init_sel_class(struct batadv_priv *bat_priv)
2244 atomic_set(&bat_priv->gw.sel_class, 20);
2248 batadv_iv_gw_get_best_gw_node(struct batadv_priv *bat_priv)
2260 hlist_for_each_entry_rcu(gw_node, &bat_priv->gw.gateway_list, list) {
2276 switch (atomic_read(&bat_priv->gw.sel_class)) {
2324 static bool batadv_iv_gw_is_eligible(struct batadv_priv *bat_priv,
2336 if (atomic_read(&bat_priv->gw.sel_class) <= 2)
2371 if ((atomic_read(&bat_priv->gw.sel_class) > 3) &&
2372 (orig_tq_avg - gw_tq_avg < atomic_read(&bat_priv->gw.sel_class)))
2375 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
2394 * @bat_priv: The bat priv with all the soft interface information
2401 struct batadv_priv *bat_priv,
2418 curr_gw = batadv_gw_get_selected_gw_node(bat_priv);
2469 * @bat_priv: The bat priv with all the soft interface information
2472 struct batadv_priv *bat_priv)
2479 spin_lock_bh(&bat_priv->gw.list_lock);
2480 cb->seq = bat_priv->gw.generation << 1 | 1;
2482 hlist_for_each_entry(gw_node, &bat_priv->gw.gateway_list, list) {
2486 if (batadv_iv_gw_dump_entry(msg, portid, cb, bat_priv,
2495 spin_unlock_bh(&bat_priv->gw.list_lock);
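
The matches above repeat one access pattern: bat_priv is derived from a hard interface's soft_iface via netdev_priv(), per-mesh settings on it are read as atomics, management counters are bumped through batadv_inc_counter()/batadv_add_counter(), and the OGM aggregation queue is guarded by forw_bat_list_lock. The sketch below only condenses what the matched lines already show; the wrapper function batadv_iv_example() and its control flow are illustrative assumptions, not code taken from bat_iv_ogm.c.

    /* batman-adv internal headers, as used by bat_iv_ogm.c */
    #include "main.h"
    #include "types.h"

    /* Condensed illustration of the bat_priv access pattern visible in the
     * matches above. Field and function names come from the matched lines;
     * the surrounding structure is a sketch only.
     */
    static void batadv_iv_example(struct batadv_hard_iface *hard_iface,
                                  struct sk_buff *skb)
    {
            /* bat_priv hangs off the soft interface (cf. lines 335, 543, 689) */
            struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);

            /* per-mesh knobs are plain atomics on bat_priv (cf. lines 281, 296, 641) */
            if (!atomic_read(&bat_priv->aggregated_ogms))
                    return;

            /* management counters (cf. lines 389-390) */
            batadv_inc_counter(bat_priv, BATADV_CNT_MGMT_TX);
            batadv_add_counter(bat_priv, BATADV_CNT_MGMT_TX_BYTES, skb->len);

            /* the OGM aggregation queue is protected by forw_bat_list_lock
             * (cf. lines 639-677)
             */
            spin_lock_bh(&bat_priv->forw_bat_list_lock);
            /* ... walk &bat_priv->forw_bat_list here ... */
            spin_unlock_bh(&bat_priv->forw_bat_list_lock);
    }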