Searched refs:__always_inline (Results 326 - 350 of 734) sorted by relevance


/linux-master/include/linux/
hash.h 74 static __always_inline u32 hash_64_generic(u64 val, unsigned int bits)
hardirq.h 22 static __always_inline void rcu_irq_enter_check_tick(void)
rcuref.h 79 static __always_inline __must_check bool __rcuref_put(rcuref_t *ref)
rwsem.h 165 static __always_inline int rwsem_is_locked(const struct rw_semaphore *sem)
170 static __always_inline void rwsem_assert_held_nolockdep(const struct rw_semaphore *sem)
175 static __always_inline void rwsem_assert_held_write_nolockdep(const struct rw_semaphore *sem)
180 static __always_inline int rwsem_is_contended(struct rw_semaphore *sem)
/linux-master/lib/
strnlen_user.c 23 static __always_inline long do_strnlen_user(const char __user *src, unsigned long count, unsigned long max)
/linux-master/tools/testing/selftests/bpf/progs/
tailcall_bpf2bpf4.c 22 static __always_inline int subprog_noise(void)
test_attach_probe.c 82 static __always_inline bool verify_sleepable_user_copy(void)
uprobe_multi.c 29 static __always_inline bool verify_sleepable_user_copy(void)
test_bpf_ma.c 60 static __always_inline void batch_alloc(struct bpf_map *map, unsigned int batch, unsigned int idx)
87 static __always_inline void batch_free(struct bpf_map *map, unsigned int batch, unsigned int idx)
109 static __always_inline void batch_percpu_alloc(struct bpf_map *map, unsigned int batch,
137 static __always_inline void batch_percpu_free(struct bpf_map *map, unsigned int batch,
test_seg6_loop.c 55 static __always_inline struct ip6_srh_t *get_srh(struct __sk_buff *skb)
89 static __always_inline int update_tlv_pad(struct __sk_buff *skb,
119 static __always_inline int is_valid_tlv_boundary(struct __sk_buff *skb,
181 static __always_inline int add_tlv(struct __sk_buff *skb,
test_sysctl_loop1.c 24 static __always_inline int is_tcp_mem(struct bpf_sysctl *ctx)
test_sysctl_prog.c 25 static __always_inline int is_tcp_mem(struct bpf_sysctl *ctx)
/linux-master/kernel/sched/
clock.c 96 static __always_inline struct sched_clock_data *this_scd(void)
247 static __always_inline u64 wrap_min(u64 x, u64 y)
252 static __always_inline u64 wrap_max(u64 x, u64 y)
263 static __always_inline u64 sched_clock_local(struct sched_clock_data *scd)
/linux-master/scripts/atomic/
gen-atomic-long.sh 55 static __always_inline ${ret}
/linux-master/tools/testing/selftests/arm64/signal/
test_signals_utils.h 60 static __always_inline bool get_current_context(struct tdescr *td,
/linux-master/include/uapi/linux/
ioprio.h 110 static __always_inline __u16 ioprio_value(int prioclass, int priolevel,
/linux-master/samples/bpf/
syscall_tp_kern.c 46 static __always_inline void count(void *map)
/linux-master/drivers/gpu/drm/nouveau/include/nvkm/core/
event.h 36 static __always_inline int
/linux-master/arch/x86/kvm/vmx/
hyperv_evmcs.h 129 static __always_inline int evmcs_field_offset(unsigned long field,
/linux-master/arch/x86/include/asm/
ptrace.h 209 static __always_inline int user_mode(struct pt_regs *regs)
218 static __always_inline int v8086_mode(struct pt_regs *regs)
262 static __always_inline bool ip_within_syscall_gap(struct pt_regs *regs)
312 static __always_inline bool regs_irqs_disabled(struct pt_regs *regs)
spec-ctrl.h 84 static __always_inline void __update_spec_ctrl(u64 val)
/linux-master/arch/s390/lib/
csum-partial.c 19 static __always_inline __wsum csum_copy(void *dst, const void *src, int len, __wsum sum, bool copy)
/linux-master/arch/arm64/include/asm/
archrandom.h 62 static __always_inline bool __cpu_has_rng(void)
/linux-master/arch/riscv/include/asm/
bitops.h 48 static __always_inline unsigned long variable__ffs(unsigned long word)
77 static __always_inline unsigned long variable__fls(unsigned long word)
106 static __always_inline int variable_ffs(int x)
138 static __always_inline int variable_fls(unsigned int x)
/linux-master/arch/loongarch/include/asm/
percpu.h 40 static __always_inline unsigned long __percpu_##op(void *ptr, \
71 static __always_inline unsigned long __percpu_read(void __percpu *ptr, int size)
108 static __always_inline void __percpu_write(void __percpu *ptr, unsigned long val, int size)
140 static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val, int size)

Completed in 367 milliseconds
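
All of the hits above share one pattern: a small static helper, defined in a header or a single translation unit, marked __always_inline so the compiler is not free to leave it out of line. A minimal stand-alone sketch of that pattern follows, assuming the kernel's usual expansion of the macro (inline plus the GCC/Clang always_inline attribute); hash_bits_demo and its constant are hypothetical illustrations, not taken from any of the files listed.

/*
 * Sketch: a tiny helper forced inline in the same style as the
 * search hits above. The fallback macro mirrors the attribute the
 * kernel applies; hash_bits_demo is a made-up example, not kernel code.
 */
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#ifndef __always_inline
#define __always_inline inline __attribute__((__always_inline__))
#endif

static __always_inline uint32_t hash_bits_demo(uint64_t val, unsigned int bits)
{
	/* Multiply by a large odd constant, keep the top 'bits' bits. */
	return (uint32_t)((val * 0x61c8864680b583ebULL) >> (64 - bits));
}

int main(void)
{
	printf("%" PRIu32 "\n", hash_bits_demo(12345, 8));
	return 0;
}

Unlike plain inline, the always_inline attribute makes GCC and Clang inline the helper even when their heuristics would otherwise decline, and they typically diagnose the cases where inlining is impossible; that guarantee is why hot-path helpers such as the rwsem, ptrace and per-CPU accessors above carry it.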
