Lines matching refs: ign

78 static void ip6gre_tunnel_link(struct ip6gre_net *ign, struct ip6_tnl *t);
124 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
132 for_each_ip_tunnel_rcu(t, ign->tunnels_r_l[h0 ^ h1]) {
157 for_each_ip_tunnel_rcu(t, ign->tunnels_r[h0 ^ h1]) {
181 for_each_ip_tunnel_rcu(t, ign->tunnels_l[h1]) {
207 for_each_ip_tunnel_rcu(t, ign->tunnels_wc[h1]) {
235 t = rcu_dereference(ign->collect_md_tun_erspan);
237 t = rcu_dereference(ign->collect_md_tun);
242 ndev = READ_ONCE(ign->fb_tunnel_dev);
249 static struct ip6_tnl __rcu **__ip6gre_bucket(struct ip6gre_net *ign,
264 return &ign->tunnels[prio][h];
267 static void ip6gre_tunnel_link_md(struct ip6gre_net *ign, struct ip6_tnl *t)
270 rcu_assign_pointer(ign->collect_md_tun, t);
273 static void ip6erspan_tunnel_link_md(struct ip6gre_net *ign, struct ip6_tnl *t)
276 rcu_assign_pointer(ign->collect_md_tun_erspan, t);
279 static void ip6gre_tunnel_unlink_md(struct ip6gre_net *ign, struct ip6_tnl *t)
282 rcu_assign_pointer(ign->collect_md_tun, NULL);
285 static void ip6erspan_tunnel_unlink_md(struct ip6gre_net *ign,
289 rcu_assign_pointer(ign->collect_md_tun_erspan, NULL);
292 static inline struct ip6_tnl __rcu **ip6gre_bucket(struct ip6gre_net *ign,
295 return __ip6gre_bucket(ign, &t->parms);
298 static void ip6gre_tunnel_link(struct ip6gre_net *ign, struct ip6_tnl *t)
300 struct ip6_tnl __rcu **tp = ip6gre_bucket(ign, t);
306 static void ip6gre_tunnel_unlink(struct ip6gre_net *ign, struct ip6_tnl *t)
311 for (tp = ip6gre_bucket(ign, t);
331 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
333 for (tp = __ip6gre_bucket(ign, parms);
352 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
385 ip6gre_tunnel_link(ign, nt);
396 struct ip6gre_net *ign = net_generic(t->net, ip6gre_net_id);
398 ip6erspan_tunnel_unlink_md(ign, t);
399 ip6gre_tunnel_unlink(ign, t);
407 struct ip6gre_net *ign = net_generic(t->net, ip6gre_net_id);
409 ip6gre_tunnel_unlink_md(ign, t);
410 ip6gre_tunnel_unlink(ign, t);
411 if (ign->fb_tunnel_dev == dev)
412 WRITE_ONCE(ign->fb_tunnel_dev, NULL);
1276 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
1282 if (dev == ign->fb_tunnel_dev) {
1320 if (dev != ign->fb_tunnel_dev && cmd == SIOCCHGTUNNEL) {
1329 ip6gre_tunnel_unlink(ign, t);
1332 ip6gre_tunnel_link(ign, t);
1353 if (dev == ign->fb_tunnel_dev) {
1363 if (t == netdev_priv(ign->fb_tunnel_dev))
1570 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
1585 t = rtnl_dereference(ign->tunnels[prio][h]);
1602 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
1614 ign->fb_tunnel_dev = ndev;
1615 dev_net_set(ign->fb_tunnel_dev, net);
1619 ign->fb_tunnel_dev->features |= NETIF_F_NETNS_LOCAL;
1622 ip6gre_fb_tunnel_init(ign->fb_tunnel_dev);
1623 ign->fb_tunnel_dev->rtnl_link_ops = &ip6gre_link_ops;
1625 err = register_netdev(ign->fb_tunnel_dev);
1629 rcu_assign_pointer(ign->tunnels_wc[0],
1630 netdev_priv(ign->fb_tunnel_dev));
2018 struct ip6gre_net *ign;
2022 ign = net_generic(net, ip6gre_net_id);
2025 if (rtnl_dereference(ign->collect_md_tun))
2035 ip6gre_tunnel_link_md(ign, nt);
2048 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
2051 if (dev == ign->fb_tunnel_dev)
2080 struct ip6gre_net *ign = net_generic(t->net, ip6gre_net_id);
2087 ip6gre_tunnel_unlink_md(ign, t);
2088 ip6gre_tunnel_unlink(ign, t);
2090 ip6gre_tunnel_link_md(ign, t);
2091 ip6gre_tunnel_link(ign, t);
2098 struct ip6gre_net *ign = net_generic(net, ip6gre_net_id);
2100 if (dev != ign->fb_tunnel_dev)
2251 struct ip6gre_net *ign;
2256 ign = net_generic(net, ip6gre_net_id);
2259 if (rtnl_dereference(ign->collect_md_tun_erspan))
2269 ip6erspan_tunnel_link_md(ign, nt);
2293 struct ip6gre_net *ign = net_generic(dev_net(dev), ip6gre_net_id);
2302 ip6gre_tunnel_unlink_md(ign, t);
2303 ip6gre_tunnel_unlink(ign, t);
2305 ip6erspan_tunnel_link_md(ign, t);
2306 ip6gre_tunnel_link(ign, t);
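
The matches above cluster around one pattern: each ip6gre_net keeps per-prio hash buckets of tunnels (ign->tunnels[prio][h], with the tunnels_wc / tunnels_r / tunnels_l / tunnels_r_l views), plus single collect_md pointers, all written with rcu_assign_pointer while writers hold RTNL and walked by readers under RCU. Below is a minimal user-space sketch of that link/unlink discipline, assuming a trivial key-based hash; the names here (tnl_net, bucket, tunnel_link, tunnel_unlink) are illustrative only, and the real code's rcu_assign_pointer/rtnl_dereference ordering is reduced to plain assignments with comments marking where those primitives sit.

#include <stddef.h>
#include <stdio.h>

#define HASH_SIZE 16   /* hypothetical bucket count for this sketch */
#define PRIO_MAX  4    /* wildcard / remote / local / remote+local, as in tunnels[prio][h] */

struct tnl {
    struct tnl *next;  /* kernel: __rcu pointer, chains grow by head insertion */
    unsigned int key;
};

struct tnl_net {
    /* kernel: ign->tunnels[prio][h]; writers serialize on RTNL, readers use RCU */
    struct tnl *tunnels[PRIO_MAX][HASH_SIZE];
};

static struct tnl **bucket(struct tnl_net *ign, struct tnl *t, int prio)
{
    /* the kernel hashes the GRE key / endpoint addresses; a trivial stand-in here */
    return &ign->tunnels[prio][t->key & (HASH_SIZE - 1)];
}

/* Head insertion: set t->next to the old head, then publish t as the new head.
 * In the kernel both stores go through rcu_assign_pointer for ordering. */
static void tunnel_link(struct tnl_net *ign, struct tnl *t, int prio)
{
    struct tnl **tp = bucket(ign, t, prio);

    t->next = *tp;
    *tp = t;
}

/* Unlink by walking pointer-to-pointer, mirroring the
 * "for (tp = ip6gre_bucket(ign, t); ...)" loop at line 311: removal is a single
 * pointer store, so concurrent RCU readers never see a broken chain. */
static void tunnel_unlink(struct tnl_net *ign, struct tnl *t, int prio)
{
    struct tnl **tp;
    struct tnl *iter;

    for (tp = bucket(ign, t, prio); (iter = *tp) != NULL; tp = &iter->next) {
        if (iter == t) {
            *tp = t->next;
            break;
        }
    }
}

int main(void)
{
    struct tnl_net ign = { 0 };
    struct tnl a = { .key = 1 }, b = { .key = 17 };  /* both hash to bucket 1 */

    tunnel_link(&ign, &a, 0);
    tunnel_link(&ign, &b, 0);
    tunnel_unlink(&ign, &a, 0);
    printf("bucket 1 head key: %u\n", ign.tunnels[0][1]->key);  /* prints 17 */
    return 0;
}

The collect_md entries in the listing (collect_md_tun, collect_md_tun_erspan) follow an even simpler version of the same idea: a single per-net pointer assigned with rcu_assign_pointer on link and set to NULL on unlink, rather than a hash chain.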