/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _KSTACK_H
#define _KSTACK_H

#include <linux/thread_info.h>
#include <linux/sched.h>
#include <asm/ptrace.h>
#include <asm/irq.h>

/* SP must be STACK_BIAS adjusted already.  */
static inline bool kstack_valid(struct thread_info *tp, unsigned long sp)
{
	unsigned long base = (unsigned long) tp;

	/* Stack pointer must be 16-byte aligned.  */
	if (sp & (16UL - 1))
		return false;

	if (sp >= (base + sizeof(struct thread_info)) &&
	    sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
		return true;

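	/* Otherwise the SP may live on this CPU's hard or soft IRQ stack. */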
	if (hardirq_stack[tp->cpu]) {
		base = (unsigned long) hardirq_stack[tp->cpu];
		if (sp >= base &&
		    sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
			return true;
		base = (unsigned long) softirq_stack[tp->cpu];
		if (sp >= base &&
		    sp <= (base + THREAD_SIZE - sizeof(struct sparc_stackf)))
			return true;
	}
	return false;
}

/* Does "regs" point to a valid pt_regs trap frame?  */
static inline bool kstack_is_trap_frame(struct thread_info *tp, struct pt_regs *regs)
{
	unsigned long base = (unsigned long) tp;
	unsigned long addr = (unsigned long) regs;

	if (addr >= base &&
	    addr <= (base + THREAD_SIZE - sizeof(*regs)))
		goto check_magic;

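	/* The frame may instead sit on this CPU's hard or soft IRQ stack. */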
	if (hardirq_stack[tp->cpu]) {
		base = (unsigned long) hardirq_stack[tp->cpu];
		if (addr >= base &&
		    addr <= (base + THREAD_SIZE - sizeof(*regs)))
			goto check_magic;
		base = (unsigned long) softirq_stack[tp->cpu];
		if (addr >= base &&
		    addr <= (base + THREAD_SIZE - sizeof(*regs)))
			goto check_magic;
	}
	return false;

check_magic:
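	/* A real trap frame is tagged with PT_REGS_MAGIC; the low nine
	 * bits of ->magic carry the trap type, so mask them off before
	 * comparing.
	 */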
	if ((regs->magic & ~0x1ff) == PT_REGS_MAGIC)
		return true;
	return false;
}

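/* Switch the current stack pointer onto this CPU's hardirq stack,
 * unless it is already there, and hand back the original %sp so the
 * caller can later pass it to restore_hardirq_stack().
 */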
static inline __attribute__((always_inline)) void *set_hardirq_stack(void)
{
	void *orig_sp, *sp = hardirq_stack[smp_processor_id()];

	__asm__ __volatile__("mov %%sp, %0" : "=r" (orig_sp));
	if (orig_sp < sp ||
	    orig_sp > (sp + THREAD_SIZE)) {
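		/* Point %sp at the top of the IRQ stack, leaving room
		 * for an initial stack frame and converting the address
		 * into a STACK_BIAS-adjusted %sp value.
		 */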
		sp += THREAD_SIZE - 192 - STACK_BIAS;
		__asm__ __volatile__("mov %0, %%sp" : : "r" (sp));
	}

	return orig_sp;
}

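/* Undo set_hardirq_stack() by restoring the %sp it returned. */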
static inline __attribute__((always_inline)) void restore_hardirq_stack(void *orig_sp)
{
	__asm__ __volatile__("mov %0, %%sp" : : "r" (orig_sp));
}

#endif /* _KSTACK_H */