Lines matching refs:softnet_data (cross-reference hits for the per-CPU struct softnet_data; the functions involved point to net/core/dev.c)

227 static inline void backlog_lock_irq_save(struct softnet_data *sd,
236 static inline void backlog_lock_irq_disable(struct softnet_data *sd)
244 static inline void backlog_unlock_irq_restore(struct softnet_data *sd,
253 static inline void backlog_unlock_irq_enable(struct softnet_data *sd)
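
The four backlog_{lock,unlock}_irq_* helpers above wrap the locking needed around a CPU's backlog queue: when RPS or threaded backlog NAPI is in use, remote CPUs can touch the queue, so the input_pkt_queue spinlock is taken; otherwise disabling local interrupts is enough. A minimal sketch of the save/restore pair, assuming the predicate is spelled use_backlog_threads() as in recent trees (the exact condition is an assumption here):

    static inline void backlog_lock_irq_save(struct softnet_data *sd,
                                             unsigned long *flags)
    {
            /* remote CPUs may queue to us: need the real spinlock */
            if (IS_ENABLED(CONFIG_RPS) || use_backlog_threads())
                    spin_lock_irqsave(&sd->input_pkt_queue.lock, *flags);
            else
                    local_irq_save(*flags);
    }

    static inline void backlog_unlock_irq_restore(struct softnet_data *sd,
                                                  unsigned long *flags)
    {
            if (IS_ENABLED(CONFIG_RPS) || use_backlog_threads())
                    spin_unlock_irqrestore(&sd->input_pkt_queue.lock, *flags);
            else
                    local_irq_restore(*flags);
    }
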
452 DEFINE_PER_CPU_ALIGNED(struct softnet_data, softnet_data);
453 EXPORT_PER_CPU_SYMBOL(softnet_data);
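
The definition above creates one struct softnet_data per possible CPU and exports it, so both built-in code and modules reach it through the standard per-CPU accessors. The two access patterns that recur throughout this listing look roughly like this (softnet_access_example() is a made-up name, for illustration only):

    #include <linux/netdevice.h>
    #include <linux/percpu.h>

    static void softnet_access_example(int cpu)
    {
            struct softnet_data *sd;

            /* this CPU's instance: the caller must keep the task from
             * migrating (BH, preemption or IRQs disabled) while using it */
            sd = this_cpu_ptr(&softnet_data);

            /* a specific CPU's instance, e.g. for hotplug or stats dumps */
            sd = &per_cpu(softnet_data, cpu);
            (void)sd;
    }
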
3119 struct softnet_data *sd;
3123 sd = this_cpu_ptr(&softnet_data);
3187 skb->next = __this_cpu_read(softnet_data.completion_queue);
3188 __this_cpu_write(softnet_data.completion_queue, skb);
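
The two lines above are the lock-free push onto the current CPU's completion_queue: an skb freed from hard-IRQ context is chained through skb->next and left for the TX softirq to destroy. A simplified sketch of the surrounding dev_kfree_skb_irq()-style path (not a verbatim copy):

    static void dev_kfree_skb_irq_sketch(struct sk_buff *skb)
    {
            unsigned long flags;

            /* defer the real free: we may be in hardirq context */
            local_irq_save(flags);
            skb->next = __this_cpu_read(softnet_data.completion_queue);
            __this_cpu_write(softnet_data.completion_queue, skb);
            raise_softirq_irqoff(NET_TX_SOFTIRQ);  /* net_tx_action() frees the chain */
            local_irq_restore(flags);
    }
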
3945 return __this_cpu_read(softnet_data.xmit.skip_txqueue);
3950 __this_cpu_write(softnet_data.xmit.skip_txqueue, skip);
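
These two hits are the per-CPU xmit.skip_txqueue flag, consulted on the transmit path so that a TX queue already picked by the egress hook is not re-selected. The accessors are presumably the netdev_xmit_txqueue_skipped()/netdev_xmit_skip_txqueue() pair; the double-underscore per-CPU ops are safe because the callers run with bottom halves disabled:

    static bool netdev_xmit_txqueue_skipped(void)
    {
            return __this_cpu_read(softnet_data.xmit.skip_txqueue);
    }

    void netdev_xmit_skip_txqueue(bool skip)
    {
            __this_cpu_write(softnet_data.xmit.skip_txqueue, skip);
    }
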
4472 static inline void ____napi_schedule(struct softnet_data *sd,
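
____napi_schedule() is the common tail of every NAPI scheduling path: it links the napi instance onto this CPU's poll_list and marks NET_RX_SOFTIRQ pending, to be handled by net_rx_action(). Ignoring the threaded-NAPI branch of the real function, the core is roughly:

    static inline void ____napi_schedule(struct softnet_data *sd,
                                         struct napi_struct *napi)
    {
            /* called with interrupts disabled, sd == this CPU's softnet_data */
            list_add_tail(&napi->poll_list, &sd->poll_list);
            /* net_rx_action() will walk sd->poll_list and call napi->poll() */
            __raise_softirq_irqoff(NET_RX_SOFTIRQ);
    }
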
4551 head = READ_ONCE(per_cpu(softnet_data, next_cpu).input_queue_head);
4634 ((int)(READ_ONCE(per_cpu(softnet_data, tcpu).input_queue_head) -
4689 ((int)(READ_ONCE(per_cpu(softnet_data, cpu).input_queue_head) -
4704 struct softnet_data *sd = data;
4715 struct softnet_data *sd = data;
4731 static void napi_schedule_rps(struct softnet_data *sd)
4733 struct softnet_data *mysd = this_cpu_ptr(&softnet_data);
4756 void kick_defer_list_purge(struct softnet_data *sd, unsigned int cpu)
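
napi_schedule_rps() is where one CPU asks another to run its backlog: a remote softnet_data is chained onto the local rps_ipi_list (the IPIs are sent in a batch later), while the local case simply schedules this CPU's backlog NAPI. A simplified sketch, eliding the in_net_rx_action shortcut of the real function:

    static void napi_schedule_rps(struct softnet_data *sd)
    {
            struct softnet_data *mysd = this_cpu_ptr(&softnet_data);

    #ifdef CONFIG_RPS
            if (sd != mysd) {
                    /* remember the remote CPU; net_rps_send_ipi() kicks it later */
                    sd->rps_ipi_next = mysd->rps_ipi_list;
                    mysd->rps_ipi_list = sd;
                    __raise_softirq_irqoff(NET_RX_SOFTIRQ);
                    return;
            }
    #endif
            /* local delivery: just poll our own backlog */
            __napi_schedule_irqoff(&mysd->backlog);
    }
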
4781 struct softnet_data *sd;
4787 sd = this_cpu_ptr(&softnet_data);
4821 struct softnet_data *sd;
4832 sd = &per_cpu(softnet_data, cpu);
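
The per_cpu(softnet_data, cpu) lookup here is characteristic of enqueue_to_backlog(), which queues an skb on a possibly remote CPU's input_pkt_queue and makes sure that CPU's backlog NAPI gets polled. A much-simplified sketch (the flow-limit heuristic and drop accounting are omitted, and the spelling of the max_backlog sysctl symbol varies between kernel versions):

    static int enqueue_to_backlog_sketch(struct sk_buff *skb, int cpu)
    {
            struct softnet_data *sd = &per_cpu(softnet_data, cpu);
            unsigned long flags;

            backlog_lock_irq_save(sd, &flags);
            if (skb_queue_len(&sd->input_pkt_queue) <= READ_ONCE(net_hotdata.max_backlog)) {
                    if (skb_queue_empty(&sd->input_pkt_queue))
                            napi_schedule_rps(sd);  /* first packet: kick the backlog NAPI */
                    __skb_queue_tail(&sd->input_pkt_queue, skb);
                    backlog_unlock_irq_restore(sd, &flags);
                    return NET_RX_SUCCESS;
            }
            backlog_unlock_irq_restore(sd, &flags);

            kfree_skb(skb);                         /* queue full: drop */
            return NET_RX_DROP;
    }
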
5216 struct softnet_data *sd = this_cpu_ptr(&softnet_data);
5438 __this_cpu_inc(softnet_data.processed);
5922 struct softnet_data *sd;
5925 sd = this_cpu_ptr(&softnet_data);
5950 struct softnet_data *sd = &per_cpu(softnet_data, cpu);
6004 static void net_rps_send_ipi(struct softnet_data *remsd)
6008 struct softnet_data *next = remsd->rps_ipi_next;
6021 static void net_rps_action_and_irq_enable(struct softnet_data *sd)
6024 struct softnet_data *remsd = sd->rps_ipi_list;
6038 static bool sd_has_rps_ipi_waiting(struct softnet_data *sd)
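
This cluster drains the rps_ipi_list built by napi_schedule_rps(): net_rps_action_and_irq_enable() detaches the list with interrupts disabled, re-enables them, and net_rps_send_ipi() walks the chain sending one IPI per remote CPU so each one processes its own backlog. The walk is roughly:

    static void net_rps_send_ipi(struct softnet_data *remsd)
    {
    #ifdef CONFIG_RPS
            while (remsd) {
                    struct softnet_data *next = remsd->rps_ipi_next;

                    if (cpu_online(remsd->cpu))
                            smp_call_function_single_async(remsd->cpu, &remsd->csd);
                    remsd = next;
            }
    #endif
    }
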
6049 struct softnet_data *sd = container_of(napi, struct softnet_data, backlog);
6112 ____napi_schedule(this_cpu_ptr(&softnet_data), n);
6162 ____napi_schedule(this_cpu_ptr(&softnet_data), n);
6253 static void skb_defer_free_flush(struct softnet_data *sd)
6398 skb_defer_free_flush(this_cpu_ptr(&softnet_data));
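
skb_defer_free_flush() handles skbs that other CPUs handed back via skb_attempt_defer_free(), so the CPU that allocated them is the one that frees them. The pattern is a cheap lockless emptiness check, detaching the whole list under the per-CPU defer lock, then freeing outside the lock; an approximate sketch (recent kernels may organize the list differently):

    static void skb_defer_free_flush(struct softnet_data *sd)
    {
            struct sk_buff *skb, *next;
            unsigned long flags;

            /* paired with the WRITE_ONCE() in skb_attempt_defer_free() */
            if (!READ_ONCE(sd->defer_list))
                    return;

            spin_lock_irqsave(&sd->defer_lock, flags);
            skb = sd->defer_list;
            sd->defer_list = NULL;
            sd->defer_count = 0;
            spin_unlock_irqrestore(&sd->defer_lock, flags);

            while (skb) {
                    next = skb->next;
                    napi_consume_skb(skb, 1);
                    skb = next;
            }
    }
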
6826 struct softnet_data *sd;
6834 sd = this_cpu_ptr(&softnet_data);
6871 struct softnet_data *sd = this_cpu_ptr(&softnet_data);
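
The hits in this range fall in the RX polling paths, most importantly net_rx_action(), the NET_RX_SOFTIRQ handler that consumes everything queued on this CPU's softnet_data. A heavily condensed sketch of its control flow (signature simplified, defaults hard-coded where the real code reads sysctls):

    static void net_rx_action_sketch(void)
    {
            struct softnet_data *sd = this_cpu_ptr(&softnet_data);
            unsigned long time_limit = jiffies + usecs_to_jiffies(2000); /* netdev_budget_usecs */
            int budget = 300;                                            /* netdev_budget */
            LIST_HEAD(list);
            LIST_HEAD(repoll);

            local_irq_disable();
            list_splice_init(&sd->poll_list, &list);  /* grab all scheduled NAPIs */
            local_irq_enable();

            while (!list_empty(&list)) {
                    struct napi_struct *n;

                    skb_defer_free_flush(sd);         /* free remotely deferred skbs */
                    n = list_first_entry(&list, struct napi_struct, poll_list);
                    budget -= napi_poll(n, &repoll);  /* may put n back on repoll */

                    if (budget <= 0 || time_after_eq(jiffies, time_limit))
                            break;                    /* softirq window exhausted */
            }

            local_irq_disable();
            list_splice_tail_init(&list, &sd->poll_list);
            list_splice_tail(&repoll, &sd->poll_list); /* unfinished NAPIs run again */
            if (!list_empty(&sd->poll_list))
                    __raise_softirq_irqoff(NET_RX_SOFTIRQ);
            net_rps_action_and_irq_enable(sd);         /* re-enables IRQs, sends RPS IPIs */
    }
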
11466 struct softnet_data *sd, *oldsd, *remsd = NULL;
11470 sd = &per_cpu(softnet_data, cpu);
11471 oldsd = &per_cpu(softnet_data, oldcpu);
11855 struct softnet_data *sd = per_cpu_ptr(&softnet_data, cpu);
11863 struct softnet_data *sd = per_cpu_ptr(&softnet_data, cpu);
11870 struct softnet_data *sd = per_cpu_ptr(&softnet_data, cpu);
11915 struct softnet_data *sd = &per_cpu(softnet_data, i);
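
The last hit is the boot-time loop in net_dev_init() that prepares every possible CPU's softnet_data before the first packet is handled: queues and lists are initialized and the backlog pseudo-NAPI gets process_backlog() as its poll routine. A simplified sketch of that loop body (only a subset of the fields the real loop initializes):

    for_each_possible_cpu(i) {
            struct softnet_data *sd = &per_cpu(softnet_data, i);

            skb_queue_head_init(&sd->input_pkt_queue);
            skb_queue_head_init(&sd->process_queue);
            INIT_LIST_HEAD(&sd->poll_list);
            sd->output_queue_tailp = &sd->output_queue;
            spin_lock_init(&sd->defer_lock);
    #ifdef CONFIG_RPS
            INIT_CSD(&sd->csd, rps_trigger_softirq, sd); /* callback run by the RPS IPI */
            sd->cpu = i;
    #endif
            /* the backlog is a software NAPI instance, polled like a NIC's */
            sd->backlog.poll = process_backlog;
    }
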