Lines matching defs:val in include/net/sock.h

668 int sk_set_peek_off(struct sock *sk, int val);
679 static inline void sk_peek_offset_bwd(struct sock *sk, int val)
684 off = max_t(s32, off - val, 0);
689 static inline void sk_peek_offset_fwd(struct sock *sk, int val)
691 sk_peek_offset_bwd(sk, -val);
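
Lines 668-691 are the MSG_PEEK offset helpers: sk_set_peek_off() (the out-of-line setter, defined in net/core/sock.c) installs a new peek offset, and the two inlines slide it backward or forward as data is consumed or peeked. A sketch of the inline pair in context, reconstructed from the matched fragments; the READ_ONCE()/WRITE_ONCE() pairing and the unlikely() hint are assumptions based on how sk_peek_off is accessed locklessly:

static inline void sk_peek_offset_bwd(struct sock *sk, int val)
{
	s32 off = READ_ONCE(sk->sk_peek_off);

	/* A negative offset means SO_PEEK_OFF is not in use */
	if (unlikely(off >= 0)) {
		off = max_t(s32, off - val, 0);	/* never rewind below 0 */
		WRITE_ONCE(sk->sk_peek_off, off);
	}
}

/* Advancing is just a backward move by a negative amount */
static inline void sk_peek_offset_fwd(struct sock *sk, int val)
{
	sk_peek_offset_bwd(sk, -val);
}
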
1036 static inline void sk_wmem_queued_add(struct sock *sk, int val)
1038 WRITE_ONCE(sk->sk_wmem_queued, sk->sk_wmem_queued + val);
1041 static inline void sk_forward_alloc_add(struct sock *sk, int val)
1044 WRITE_ONCE(sk->sk_forward_alloc, sk->sk_forward_alloc + val);
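
Both adders at lines 1036-1044 wrap the update in WRITE_ONCE() because sk_wmem_queued and sk_forward_alloc are read locklessly elsewhere; val may be negative to uncharge. A hypothetical caller (sk_charge_skb() is an illustrative name, not a kernel function) charging one skb to the write queue the way stream protocols do:

/* Illustrative only: account one queued skb against the socket.
 * truesize bytes move from the forward-alloc reservation to the
 * write-queue total.
 */
static void sk_charge_skb(struct sock *sk, struct sk_buff *skb)
{
	sk_wmem_queued_add(sk, skb->truesize);
	sk_forward_alloc_add(sk, -skb->truesize);
}
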
1415 int val = this_cpu_xchg(*proto->per_cpu_fw_alloc, 0);
1417 if (val)
1418 atomic_long_add(val, proto->memory_allocated);
1422 sk_memory_allocated_add(const struct sock *sk, int val)
1426 val = this_cpu_add_return(*proto->per_cpu_fw_alloc, val);
1428 if (unlikely(val >= READ_ONCE(sysctl_mem_pcpu_rsv)))
1433 sk_memory_allocated_sub(const struct sock *sk, int val)
1437 val = this_cpu_sub_return(*proto->per_cpu_fw_alloc, val);
1439 if (unlikely(val <= -READ_ONCE(sysctl_mem_pcpu_rsv)))
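
Lines 1415-1439 batch protocol memory accounting per CPU: charges accumulate in *proto->per_cpu_fw_alloc and are folded into the shared atomic_long_t proto->memory_allocated only when a CPU's running total drifts past +/- sysctl_mem_pcpu_rsv, keeping the hot path off the global counter. Reconstructed in context; the drain helper's name and the proto local match current sock.h but may differ in older trees:

static inline void proto_memory_pcpu_drain(struct proto *proto)
{
	int val = this_cpu_xchg(*proto->per_cpu_fw_alloc, 0);

	if (val)
		atomic_long_add(val, proto->memory_allocated);
}

static inline void
sk_memory_allocated_add(const struct sock *sk, int val)
{
	struct proto *proto = sk->sk_prot;

	val = this_cpu_add_return(*proto->per_cpu_fw_alloc, val);

	/* Flush to the global counter once the local batch is large */
	if (unlikely(val >= READ_ONCE(sysctl_mem_pcpu_rsv)))
		proto_memory_pcpu_drain(proto);
}

static inline void
sk_memory_allocated_sub(const struct sock *sk, int val)
{
	struct proto *proto = sk->sk_prot;

	val = this_cpu_sub_return(*proto->per_cpu_fw_alloc, val);

	if (unlikely(val <= -READ_ONCE(sysctl_mem_pcpu_rsv)))
		proto_memory_pcpu_drain(proto);
}
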
1482 int val[PROTO_INUSE_NR];
1486 const struct proto *prot, int val)
1488 this_cpu_add(net->core.prot_inuse->val[prot->inuse_idx], val);
1491 static inline void sock_inuse_add(const struct net *net, int val)
1493 this_cpu_add(net->core.prot_inuse->all, val);
1500 const struct proto *prot, int val)
1504 static inline void sock_inuse_add(const struct net *net, int val)
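
Lines 1482-1504 are the per-netns, per-CPU socket-in-use counters that back /proc/net/protocols and /proc/net/sockstat: val[] has one slot per registered protocol (indexed by prot->inuse_idx), all counts every socket, and callers pass val of 1 or -1 on socket creation and destruction. The second sock_prot_inuse_add()/sock_inuse_add() pair (lines 1500-1504) are the empty !CONFIG_PROC_FS stubs. A sketch of the CONFIG_PROC_FS side:

struct prot_inuse {
	int all;			/* every socket in this netns */
	int val[PROTO_INUSE_NR];	/* one counter per protocol */
};

static inline void sock_prot_inuse_add(const struct net *net,
				       const struct proto *prot, int val)
{
	this_cpu_add(net->core.prot_inuse->val[prot->inuse_idx], val);
}

static inline void sock_inuse_add(const struct net *net, int val)
{
	this_cpu_add(net->core.prot_inuse->all, val);
}
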
1992 int val = READ_ONCE(sk->sk_tx_queue_mapping);
1994 if (val != NO_QUEUE_MAPPING)
1995 return val;
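
Lines 1992-1995 are the heart of sk_tx_queue_get(): the cached transmit queue index is read with READ_ONCE() (paired with the WRITE_ONCE() stores in sk_tx_queue_set() and sk_tx_queue_clear()), and the NO_QUEUE_MAPPING sentinel is translated to -1 for callers. Reconstructed:

static inline int sk_tx_queue_get(const struct sock *sk)
{
	if (sk) {
		/* Paired with WRITE_ONCE() in sk_tx_queue_set()
		 * and sk_tx_queue_clear().
		 */
		int val = READ_ONCE(sk->sk_tx_queue_mapping);

		if (val != NO_QUEUE_MAPPING)
			return val;
	}
	return -1;	/* no cached queue */
}
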
2530 u32 val;
2535 val = min(sk->sk_sndbuf, sk->sk_wmem_queued >> 1);
2536 val = max_t(u32, val, sk_unused_reserved_mem(sk));
2538 WRITE_ONCE(sk->sk_sndbuf, max_t(u32, val, SOCK_MIN_SNDBUF));
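
Lines 2530-2538 are the body of sk_stream_moderate_sndbuf(), which shrinks the send buffer under memory pressure: it targets half of what is currently queued, but never drops below the socket's unused reserved memory or SOCK_MIN_SNDBUF. A sketch with the guard restored (the SOCK_SNDBUF_LOCK early return, which skips sockets whose size userspace pinned via SO_SNDBUF, is an assumption from the surrounding source):

static inline void sk_stream_moderate_sndbuf(struct sock *sk)
{
	u32 val;

	if (sk->sk_userlocks & SOCK_SNDBUF_LOCK)
		return;

	val = min(sk->sk_sndbuf, sk->sk_wmem_queued >> 1);
	val = max_t(u32, val, sk_unused_reserved_mem(sk));

	WRITE_ONCE(sk->sk_sndbuf, max_t(u32, val, SOCK_MIN_SNDBUF));
}
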
2908 static inline void sk_pacing_shift_update(struct sock *sk, int val)
2910 if (!sk || !sk_fullsock(sk) || READ_ONCE(sk->sk_pacing_shift) == val)
2912 WRITE_ONCE(sk->sk_pacing_shift, val);
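
sk_pacing_shift_update() at lines 2908-2912 controls how much data may sit queued below the socket, roughly 2^-sk_pacing_shift seconds' worth at the current pacing rate; the early return avoids dirtying a hot cache line when nothing changes or the socket is not a full socket. In context:

static inline void sk_pacing_shift_update(struct sock *sk, int val)
{
	/* Only full sockets have sk_pacing_shift; skip no-op writes */
	if (!sk || !sk_fullsock(sk) || READ_ONCE(sk->sk_pacing_shift) == val)
		return;
	WRITE_ONCE(sk->sk_pacing_shift, val);
}

Lowering the shift (mac80211 does this for wifi) keeps less data buffered per socket, trading a little throughput for latency.
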
2946 void sock_set_rcvbuf(struct sock *sk, int val);
2947 void sock_set_mark(struct sock *sk, u32 val);
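
sock_set_rcvbuf() and sock_set_mark() are declared here and defined in net/core/sock.c; they give in-kernel socket users the effect of setsockopt(SO_RCVBUF) and setsockopt(SO_MARK) without going through the syscall path. A hypothetical user, with illustrative values only:

/* Illustrative only: tune a kernel-created socket. */
static void example_tune_sock(struct socket *sock)
{
	struct sock *sk = sock->sk;

	sock_set_rcvbuf(sk, SZ_1M);	/* like SO_RCVBUF; the value is
					 * doubled internally for overhead
					 */
	sock_set_mark(sk, 1);		/* like SO_MARK, for routing/netfilter */
}
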