Searched refs:__always_inline (Results 51 - 75 of 730) sorted by relevance


/linux-master/arch/powerpc/include/asm/book3s/32/
kup.h
18 static __always_inline void kuap_lock_one(unsigned long addr)
24 static __always_inline void kuap_unlock_one(unsigned long addr)
30 static __always_inline void uaccess_begin_32s(unsigned long addr)
44 static __always_inline void uaccess_end_32s(unsigned long addr)
58 static __always_inline void __kuap_save_and_lock(struct pt_regs *regs)
71 static __always_inline void kuap_user_restore(struct pt_regs *regs)
75 static __always_inline void __kuap_kernel_restore(struct pt_regs *regs, unsigned long kuap)
90 static __always_inline unsigned long __kuap_get_and_assert_locked(void)
100 static __always_inline void allow_user_access(void __user *to, const void __user *from,
112 static __always_inline void ...
[additional matches in this file not shown]
/linux-master/tools/include/asm-generic/bitops/
non-atomic.h
16 static __always_inline void
25 static __always_inline void
43 static __always_inline void
61 static __always_inline bool
81 static __always_inline bool
93 static __always_inline bool
109 static __always_inline bool
/linux-master/include/asm-generic/
qspinlock.h
51 static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
71 static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
81 static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
90 static __always_inline int queued_spin_trylock(struct qspinlock *lock)
107 static __always_inline void queued_spin_lock(struct qspinlock *lock)
123 static __always_inline void queued_spin_unlock(struct qspinlock *lock)
133 static __always_inline bool virt_spin_lock(struct qspinlock *lock)
/linux-master/include/asm-generic/bitops/
generic-non-atomic.h
27 static __always_inline void
36 static __always_inline void
54 static __always_inline void
72 static __always_inline bool
92 static __always_inline bool
104 static __always_inline bool
120 static __always_inline bool
136 static __always_inline bool
165 static __always_inline bool
/linux-master/include/linux/
instrumented.h
24 static __always_inline void instrument_read(const volatile void *v, size_t size)
38 static __always_inline void instrument_write(const volatile void *v, size_t size)
52 static __always_inline void instrument_read_write(const volatile void *v, size_t size)
66 static __always_inline void instrument_atomic_read(const volatile void *v, size_t size)
80 static __always_inline void instrument_atomic_write(const volatile void *v, size_t size)
94 static __always_inline void instrument_atomic_read_write(const volatile void *v, size_t size)
109 static __always_inline void
126 static __always_inline void
143 static __always_inline void
kasan-enabled.h
11 static __always_inline bool kasan_enabled(void)
/linux-master/arch/x86/include/asm/
preempt.h
24 static __always_inline int preempt_count(void)
29 static __always_inline void preempt_count_set(int pc)
58 static __always_inline void set_preempt_need_resched(void)
63 static __always_inline void clear_preempt_need_resched(void)
68 static __always_inline bool test_preempt_need_resched(void)
77 static __always_inline void __preempt_count_add(int val)
82 static __always_inline void __preempt_count_sub(int val)
92 static __always_inline bool __preempt_count_dec_and_test(void)
101 static __always_inline bool should_resched(int preempt_offset)
jump_label.h
25 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
39 static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
53 static __always_inline bool arch_static_branch_jump(struct static_key * const key, const bool branch)
bitops.h
9 * __always_inline to avoid problems with older gcc's inlining heuristics.
51 static __always_inline void
65 static __always_inline void
71 static __always_inline void
84 static __always_inline void
91 static __always_inline void
97 static __always_inline bool arch_xor_unlock_is_negative_byte(unsigned long mask,
109 static __always_inline void
115 static __always_inline void
121 static __always_inline void ...
[additional matches in this file not shown]
fsgsbase.h
24 static __always_inline unsigned long rdfsbase(void)
33 static __always_inline unsigned long rdgsbase(void)
42 static __always_inline void wrfsbase(unsigned long fsbase)
47 static __always_inline void wrgsbase(unsigned long gsbase)
kvmclock.h
9 static __always_inline struct pvclock_vcpu_time_info *this_cpu_pvti(void)
/linux-master/arch/powerpc/include/asm/
cpu_has_feature.h
10 static __always_inline bool early_cpu_has_feature(unsigned long feature)
23 static __always_inline bool cpu_has_feature(unsigned long feature)
49 static __always_inline bool cpu_has_feature(unsigned long feature)
stackprotector.h
20 static __always_inline void boot_init_stack_canary(void)
/linux-master/arch/powerpc/include/asm/vdso/
gettimeofday.h
16 static __always_inline int do_syscall_2(const unsigned long _r0, const unsigned long _r3,
36 static __always_inline
44 static __always_inline
50 static __always_inline
60 static __always_inline
66 static __always_inline
72 static __always_inline
78 static __always_inline
85 static __always_inline u64 __arch_get_hw_counter(s32 clock_mode,
94 static __always_inline
[additional matches in this file not shown]
vsyscall.h
13 static __always_inline
/linux-master/arch/s390/include/asm/
fpu-insn.h
39 static __always_inline void fpu_cefbr(u8 f1, s32 val)
47 static __always_inline unsigned long fpu_cgebr(u8 f2, u8 mode)
58 static __always_inline void fpu_debr(u8 f1, u8 f2)
66 static __always_inline void fpu_ld(unsigned short fpr, freg_t *reg)
75 static __always_inline void fpu_ldgr(u8 f1, u32 val)
83 static __always_inline void fpu_lfpc(unsigned int *fpc)
120 static __always_inline void fpu_std(unsigned short fpr, freg_t *reg)
129 static __always_inline void fpu_sfpc(unsigned int fpc)
137 static __always_inline void fpu_stfpc(unsigned int *fpc)
146 static __always_inline void ...
[additional matches in this file not shown]
atomic.h
18 static __always_inline int arch_atomic_read(const atomic_t *v)
24 static __always_inline void arch_atomic_set(atomic_t *v, int i)
30 static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
36 static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
42 static __always_inline void arch_atomic_add(int i, atomic_t *v)
53 static __always_inline void arch_atomic_##op(int i, atomic_t *v) \
57 static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
77 static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
85 static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
91 static __always_inline void ...
[additional matches in this file not shown]
/linux-master/arch/arm64/include/asm/
current.h
15 static __always_inline struct task_struct *get_current(void)
virt.h
114 static __always_inline bool is_kernel_in_hyp_mode(void)
121 static __always_inline bool has_vhe(void)
138 static __always_inline bool is_protected_kvm_enabled(void)
146 static __always_inline bool has_hvhe(void)
/linux-master/arch/csky/include/asm/vdso/
vsyscall.h
12 static __always_inline struct vdso_data *__csky_get_k_vdso_data(void)
/linux-master/arch/arm/include/asm/
stackprotector.h
28 static __always_inline void boot_init_stack_canary(void)
/linux-master/arch/riscv/include/asm/
stackprotector.h
14 static __always_inline void boot_init_stack_canary(void)
/linux-master/include/vdso/
helpers.h
9 static __always_inline u32 vdso_read_begin(const struct vdso_data *vd)
20 static __always_inline u32 vdso_read_retry(const struct vdso_data *vd,
30 static __always_inline void vdso_write_begin(struct vdso_data *vd)
42 static __always_inline void vdso_write_end(struct vdso_data *vd)
/linux-master/arch/arm/include/asm/vdso/
vsyscall.h
17 static __always_inline
24 static __always_inline
/linux-master/arch/arm64/include/asm/vdso/
vsyscall.h
17 static __always_inline
24 static __always_inline

Completed in 252 milliseconds
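Note: every hit above uses the same attribute macro; the bitops.h comment (line 9) spells out why, namely that plain `inline` is only a hint and older gcc heuristics may ignore it. Below is a minimal standalone sketch of what `__always_inline` typically expands to and how it is used. The fallback #define is illustrative only; the kernel's real definition lives in include/linux/compiler_types.h and carries extra compiler-specific handling.

#include <stdio.h>

/* Fallback definition for illustration; glibc's <sys/cdefs.h> may already
 * provide an equivalent one, so only define it if missing. */
#ifndef __always_inline
#define __always_inline inline __attribute__((__always_inline__))
#endif

/* Small accessor in the style of the hits above. The attribute forces the
 * call to be inlined even at -O0, where a bare `inline` may be ignored. */
static __always_inline int add_one(int x)
{
	return x + 1;
}

int main(void)
{
	printf("%d\n", add_one(41));
	return 0;
}

In kernel headers this matters in contexts where a real function call is unacceptable, for example the stack-canary setup and low-level atomic, bitop, and vDSO helpers listed above.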
