/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive for
 * more details.
 *
 * Copyright (C) 2009 DSLab, Lanzhou University, China
 * Author: Wu Zhangjin <wuzhangjin@gmail.com>
 */

#ifndef _ASM_MIPS_FTRACE_H
#define _ASM_MIPS_FTRACE_H

#ifdef CONFIG_FUNCTION_TRACER

#define MCOUNT_ADDR ((unsigned long)(_mcount))
#define MCOUNT_INSN_SIZE 4		/* sizeof mcount call */

#ifndef __ASSEMBLY__
extern void _mcount(void);
#define mcount _mcount
#define safe_load(load, src, dst, error)		\
do {							\
	asm volatile (					\
		"1: " load " %[tmp_dst], 0(%[tmp_src])\n"	\
		"   li %[tmp_err], 0\n"			\
		"2: .insn\n"				\
							\
		".section .fixup, \"ax\"\n"		\
		"3: li %[tmp_err], 1\n"			\
		"   j 2b\n"				\
		".previous\n"				\
							\
		".section\t__ex_table,\"a\"\n\t"	\
		STR(PTR_WD) "\t1b, 3b\n\t"		\
		".previous\n"				\
							\
		: [tmp_dst] "=&r" (dst), [tmp_err] "=r" (error)\
		: [tmp_src] "r" (src)			\
		: "memory"				\
	);						\
} while (0)

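/*
 * Exception-protected store: the store counterpart of safe_load().  "error"
 * is 0 on success, or 1 if writing to "dst" faulted.
 */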
#define safe_store(store, src, dst, error)	\
do {						\
	asm volatile (				\
		"1: " store " %[tmp_src], 0(%[tmp_dst])\n"\
		"   li %[tmp_err], 0\n"		\
		"2: .insn\n"			\
						\
		".section .fixup, \"ax\"\n"	\
		"3: li %[tmp_err], 1\n"		\
		"   j 2b\n"			\
		".previous\n"			\
						\
		".section\t__ex_table,\"a\"\n\t"\
		STR(PTR_WD) "\t1b, 3b\n\t"	\
		".previous\n"			\
						\
		: [tmp_err] "=r" (error)	\
		: [tmp_dst] "r" (dst), [tmp_src] "r" (src)\
		: "memory"			\
	);					\
} while (0)

#define safe_load_code(dst, src, error) \
	safe_load(STR(lw), src, dst, error)
#define safe_store_code(src, dst, error) \
	safe_store(STR(sw), src, dst, error)

#define safe_load_stack(dst, src, error) \
	safe_load(STR(PTR_L), src, dst, error)

#define safe_store_stack(src, dst, error) \
	safe_store(STR(PTR_S), src, dst, error)

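/*
 * Illustrative sketch (not taken from this header): callers that patch or
 * inspect kernel text, e.g. the dynamic ftrace code modifier, typically pair
 * these with an explicit fault check.  "ip" below stands for a hypothetical
 * instruction address:
 *
 *	unsigned int insn;
 *	int faulted;
 *
 *	safe_load_code(insn, ip, faulted);
 *	if (faulted)
 *		return -EFAULT;
 */
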
#ifdef CONFIG_DYNAMIC_FTRACE
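/* mcount call sites need no address adjustment on MIPS; return addr as-is. */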
static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	return addr;
}

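/* No architecture-specific data is needed per patched call site. */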
struct dyn_arch_ftrace {
};

#endif /* CONFIG_DYNAMIC_FTRACE */

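/*
 * Function graph tracer hook: called from the _mcount trampoline to replace
 * the caller's return address (at *parent_ra_addr) so that function exit can
 * be traced as well.
 */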
void prepare_ftrace_return(unsigned long *parent_ra_addr, unsigned long self_ra,
			   unsigned long fp);

#endif /* __ASSEMBLY__ */
#endif /* CONFIG_FUNCTION_TRACER */
#endif /* _ASM_MIPS_FTRACE_H */