Lines matching defs:bat_priv (B.A.T.M.A.N. V OGM handling)

48  * @bat_priv: the bat priv with all the soft interface information
55 struct batadv_orig_node *batadv_v_ogm_orig_get(struct batadv_priv *bat_priv,
61 orig_node = batadv_orig_hash_find(bat_priv, addr);
65 orig_node = batadv_orig_node_new(bat_priv, addr);
70 hash_added = batadv_hash_add(bat_priv->orig_hash, batadv_compare_orig,
99 * @bat_priv: the bat priv with all the soft interface information
101 static void batadv_v_ogm_start_timer(struct batadv_priv *bat_priv)
107 if (delayed_work_pending(&bat_priv->bat_v.ogm_wq))
110 msecs = atomic_read(&bat_priv->orig_interval) - BATADV_JITTER;
112 queue_delayed_work(batadv_event_workqueue, &bat_priv->bat_v.ogm_wq,
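
The two lines above arm the OGM worker with a delay of orig_interval minus BATADV_JITTER before queue_delayed_work() is called; the delay is then spread by a random offset of up to twice the jitter, but that intermediate line does not mention bat_priv and so is not listed here, so the exact helper is assumed. A minimal userspace sketch of that jittered-interval calculation, with rand() standing in for the kernel's random source and purely illustrative interval/jitter values:

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/*
 * Jittered OGM interval sketch: subtract the jitter from the nominal
 * interval, then add a random offset of up to twice the jitter, so the
 * effective delay varies around orig_interval. rand() stands in for
 * the kernel's random helper; 1000/20 are illustrative values only.
 */
static unsigned long ogm_delay_msecs(unsigned long orig_interval,
                                     unsigned long jitter)
{
        unsigned long msecs = orig_interval - jitter;

        msecs += (unsigned long)rand() % (2 * jitter);
        return msecs;
}

int main(void)
{
        srand((unsigned int)time(NULL));
        printf("next OGM in %lu ms\n", ogm_delay_msecs(1000, 20));
        return 0;
}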
124 struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);
131 batadv_inc_counter(bat_priv, BATADV_CNT_MGMT_TX);
132 batadv_add_counter(bat_priv, BATADV_CNT_MGMT_TX_BYTES,
242 struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);
244 if (!atomic_read(&bat_priv->aggregated_ogms)) {
260 * @bat_priv: the bat priv with all the soft interface information
262 static void batadv_v_ogm_send_softif(struct batadv_priv *bat_priv)
272 lockdep_assert_held(&bat_priv->bat_v.ogm_buff_mutex);
274 if (atomic_read(&bat_priv->mesh_state) == BATADV_MESH_DEACTIVATING)
277 ogm_buff = bat_priv->bat_v.ogm_buff;
278 ogm_buff_len = bat_priv->bat_v.ogm_buff_len;
282 batadv_tt_local_commit_changes(bat_priv);
283 tvlv_len = batadv_tvlv_container_ogm_append(bat_priv, &ogm_buff,
287 bat_priv->bat_v.ogm_buff = ogm_buff;
288 bat_priv->bat_v.ogm_buff_len = ogm_buff_len;
298 ogm_packet->seqno = htonl(atomic_read(&bat_priv->bat_v.ogm_seqno));
299 atomic_inc(&bat_priv->bat_v.ogm_seqno);
305 if (hard_iface->soft_iface != bat_priv->soft_iface)
329 batadv_dbg(BATADV_DBG_BATMAN, bat_priv, "OGM2 from ourselves on %s suppressed: %s\n",
336 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
358 batadv_v_ogm_start_timer(bat_priv);
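
In the send path listed above, the ogm_packet->seqno assignment followed by atomic_inc shows the ordering: the packet is stamped with the current sequence number in network byte order and the counter is bumped only afterwards. A small userspace model of that ordering, with C11 atomics standing in for the kernel's atomic_t and a hypothetical, trimmed-down header struct:

#include <arpa/inet.h>
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Model of the seqno stamping shown above: copy the current counter
 * into the packet in network byte order, then increment the counter.
 * The struct here is a hypothetical, cut-down OGM2 header.
 */
struct ogm2_hdr {
        uint32_t seqno;         /* big endian on the wire */
};

static _Atomic uint32_t ogm_seqno = 1;

static void stamp_seqno(struct ogm2_hdr *hdr)
{
        hdr->seqno = htonl(atomic_load(&ogm_seqno));
        atomic_fetch_add(&ogm_seqno, 1);
}

int main(void)
{
        struct ogm2_hdr hdr;

        stamp_seqno(&hdr);
        printf("wire seqno: 0x%08x\n", ntohl(hdr.seqno));
        return 0;
}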
370 struct batadv_priv *bat_priv;
373 bat_priv = container_of(bat_v, struct batadv_priv, bat_v);
375 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex);
376 batadv_v_ogm_send_softif(bat_priv);
377 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex);
411 struct batadv_priv *bat_priv = netdev_priv(hard_iface->soft_iface);
414 batadv_v_ogm_start_timer(bat_priv);
438 struct batadv_priv *bat_priv = netdev_priv(primary_iface->soft_iface);
441 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex);
442 if (!bat_priv->bat_v.ogm_buff)
445 ogm_packet = (struct batadv_ogm2_packet *)bat_priv->bat_v.ogm_buff;
449 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex);
455 * @bat_priv: the bat priv with all the soft interface information
475 static u32 batadv_v_forward_penalty(struct batadv_priv *bat_priv,
481 int hop_penalty = atomic_read(&bat_priv->hop_penalty);
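
batadv_v_forward_penalty reads the configured hop_penalty above and uses it to scale down the advertised throughput once per hop. A hedged sketch of that scaling, assuming the conventional 0..255 penalty range (BATADV_TQ_MAX_VALUE); this is illustrative arithmetic, not the verbatim kernel function:

#include <stdio.h>

#define TQ_MAX_VALUE 255u       /* assumed penalty scale */

/*
 * Reduce a path throughput by a hop penalty expressed on a 0..255
 * scale: penalty 0 leaves the value untouched, 255 zeroes it.
 */
static unsigned int apply_hop_penalty(unsigned int throughput,
                                      unsigned int hop_penalty)
{
        return throughput * (TQ_MAX_VALUE - hop_penalty) / TQ_MAX_VALUE;
}

int main(void)
{
        /* illustrative values: throughput 1000, penalty 30 */
        printf("%u\n", apply_hop_penalty(1000, 30));
        return 0;
}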
508 * @bat_priv: the bat priv with all the soft interface information
518 static void batadv_v_ogm_forward(struct batadv_priv *bat_priv,
556 batadv_dbg(BATADV_DBG_BATMAN, bat_priv, "ttl exceeded\n");
580 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
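
The forward path above refuses to rebroadcast an OGM2 whose hop budget is spent ("ttl exceeded") and otherwise sends a copy with a reduced TTL. A minimal sketch of that gate; the field and threshold are illustrative, not the kernel's exact layout:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/*
 * TTL gate sketch: drop the packet once only one hop of budget is
 * left, otherwise decrement the TTL for the forwarded copy.
 */
static bool may_forward(uint8_t *ttl)
{
        if (*ttl <= 1) {
                fprintf(stderr, "ttl exceeded\n");
                return false;
        }
        (*ttl)--;
        return true;
}

int main(void)
{
        uint8_t ttl = 2;

        printf("%d\n", may_forward(&ttl));      /* 1: forwarded, ttl now 1 */
        printf("%d\n", may_forward(&ttl));      /* 0: ttl exceeded */
        return 0;
}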
595 * @bat_priv: the bat priv with all the soft interface information
607 static int batadv_v_ogm_metric_update(struct batadv_priv *bat_priv,
628 batadv_window_protected(bat_priv, seq_diff,
632 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
635 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
658 path_throughput = batadv_v_forward_penalty(bat_priv, if_incoming,
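
The metric update above computes a sequence number difference and runs it through batadv_window_protected() to discard replayed or far out-of-range OGMs before deriving path_throughput via the forward penalty. A hedged sketch of a wraparound-safe window test of that general kind; the window size is a placeholder, not the batman-adv constant:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SEQ_WINDOW 64   /* placeholder window size */

/*
 * Wraparound-safe sequence window check: the signed 32-bit difference
 * keeps working across the 2^32 rollover, and anything far outside
 * the window is treated as suspicious.
 */
static bool seqno_in_window(uint32_t last_seqno, uint32_t rx_seqno)
{
        int32_t diff = (int32_t)(rx_seqno - last_seqno);

        return diff >= -SEQ_WINDOW && diff <= SEQ_WINDOW;
}

int main(void)
{
        printf("%d\n", seqno_in_window(100, 101));              /* 1 */
        printf("%d\n", seqno_in_window(100, 100 + 100000));     /* 0 */
        return 0;
}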
678 * @bat_priv: the bat priv with all the soft interface information
688 static bool batadv_v_ogm_route_update(struct batadv_priv *bat_priv,
706 orig_neigh_node = batadv_v_ogm_orig_get(bat_priv, ethhdr->h_source);
718 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
728 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
760 batadv_update_route(bat_priv, orig_node, if_outgoing, neigh_node);
773 * @bat_priv: the bat priv with all the soft interface information
782 batadv_v_ogm_process_per_outif(struct batadv_priv *bat_priv,
794 seqno_age = batadv_v_ogm_metric_update(bat_priv, ogm2, orig_node,
804 batadv_tvlv_containers_process(bat_priv, BATADV_OGM2, orig_node,
810 forward = batadv_v_ogm_route_update(bat_priv, ethhdr, ogm2, orig_node,
816 batadv_v_ogm_forward(bat_priv, ogm2, orig_node, neigh_node,
855 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
870 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
877 if (batadv_is_my_mac(bat_priv, ogm_packet->orig)) {
878 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
887 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
895 batadv_dbg(BATADV_DBG_BATMAN, bat_priv,
900 orig_node = batadv_v_ogm_orig_get(bat_priv, ogm_packet->orig);
920 batadv_v_ogm_process_per_outif(bat_priv, ethhdr, ogm_packet, orig_node,
929 if (hard_iface->soft_iface != bat_priv->soft_iface)
956 batadv_dbg(BATADV_DBG_BATMAN, bat_priv, "OGM2 packet from %pM on %s suppressed: %s\n",
964 batadv_v_ogm_process_per_outif(bat_priv, ethhdr, ogm_packet,
988 struct batadv_priv *bat_priv = netdev_priv(if_incoming->soft_iface);
998 if (strcmp(bat_priv->algo_ops->name, "BATMAN_V") != 0)
1005 if (batadv_is_my_mac(bat_priv, ethhdr->h_source))
1008 batadv_inc_counter(bat_priv, BATADV_CNT_MGMT_RX);
1009 batadv_add_counter(bat_priv, BATADV_CNT_MGMT_RX_BYTES,
1039 * @bat_priv: the bat priv with all the soft interface information
1043 int batadv_v_ogm_init(struct batadv_priv *bat_priv)
1049 bat_priv->bat_v.ogm_buff_len = BATADV_OGM2_HLEN;
1050 ogm_buff = kzalloc(bat_priv->bat_v.ogm_buff_len, GFP_ATOMIC);
1054 bat_priv->bat_v.ogm_buff = ogm_buff;
1064 atomic_set(&bat_priv->bat_v.ogm_seqno, random_seqno);
1065 INIT_DELAYED_WORK(&bat_priv->bat_v.ogm_wq, batadv_v_ogm_send);
1067 mutex_init(&bat_priv->bat_v.ogm_buff_mutex);
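
The init block above seeds ogm_seqno with a random value (so a rebooted node does not resume an old sequence), then sets up the periodic worker and the OGM buffer mutex. A userspace sketch of the random seeding step, with rand() standing in for the kernel's random byte source:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>

/*
 * Random initial sequence number sketch: combine two rand() calls to
 * fill 32 bits; the kernel fills random_seqno directly from its own
 * random source instead.
 */
int main(void)
{
        uint32_t random_seqno;

        srand((unsigned int)time(NULL));
        random_seqno = ((uint32_t)rand() << 16) ^ (uint32_t)rand();
        printf("initial OGM2 seqno: %u\n", random_seqno);
        return 0;
}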
1074 * @bat_priv: the bat priv with all the soft interface information
1076 void batadv_v_ogm_free(struct batadv_priv *bat_priv)
1078 cancel_delayed_work_sync(&bat_priv->bat_v.ogm_wq);
1080 mutex_lock(&bat_priv->bat_v.ogm_buff_mutex);
1082 kfree(bat_priv->bat_v.ogm_buff);
1083 bat_priv->bat_v.ogm_buff = NULL;
1084 bat_priv->bat_v.ogm_buff_len = 0;
1086 mutex_unlock(&bat_priv->bat_v.ogm_buff_mutex);