/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Thiemo Seufer
 * Copyright (C) 2005  MIPS Technologies, Inc.  All rights reserved.
 *	Author: Maciej W. Rozycki <macro@mips.com>
 */


#include <asm/addrspace.h>
#include <asm/bug.h>
#include <asm/cacheflush.h>

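/*
 * Fall-back definitions for configurations whose headers do not
 * provide these macros.
 */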
#ifndef CKSEG2
#define CKSEG2 CKSSEG
#endif
#ifndef TO_PHYS_MASK
#define TO_PHYS_MASK -1
#endif

/*
 * FUNC is executed in one of the uncached segments, depending on its
 * original address as follows:
 *
 * 1. If the original address is in CKSEG0 or CKSEG1, then the uncached
 *    segment used is CKSEG1.
 * 2. If the original address is in XKPHYS, then the uncached segment
 *    used is XKPHYS(2).
 * 3. Otherwise it's a bug.
 *
 * The same remapping is done with the stack pointer.  Stack handling
 * works because we don't handle stack arguments or more complex return
 * values, so we can avoid sharing the same stack area between cached
 * and uncached modes.
 */
unsigned long run_uncached(void *func)
{
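	/*
	 * Bind ret to $2 (v0), the MIPS return value register, so the
	 * value returned by the callee below lands in ret directly.
	 */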
	register long ret __asm__("$2");
	long lfunc = (long)func, ufunc;
	long usp;
	long sp;

	__asm__("move %0, $sp" : "=r" (sp));

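	/*
	 * Map the current stack pointer to its uncached alias:
	 * CKSEG0/CKSEG1 addresses are redirected to CKSEG1, XKPHYS
	 * addresses to the uncached (CCA 2) XKPHYS region.
	 */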
	if (sp >= (long)CKSEG0 && sp < (long)CKSEG2)
		usp = CKSEG1ADDR(sp);
#ifdef CONFIG_64BIT
	else if ((long long)sp >= (long long)PHYS_TO_XKPHYS(0, 0) &&
		 (long long)sp < (long long)PHYS_TO_XKPHYS(8, 0))
		usp = PHYS_TO_XKPHYS(K_CALG_UNCACHED,
				     XKPHYS_TO_PHYS((long long)sp));
#endif
	else {
		BUG();
		usp = sp;
	}
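	/* Remap the address of the function to call in the same way. */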
	if (lfunc >= (long)CKSEG0 && lfunc < (long)CKSEG2)
		ufunc = CKSEG1ADDR(lfunc);
#ifdef CONFIG_64BIT
	else if ((long long)lfunc >= (long long)PHYS_TO_XKPHYS(0, 0) &&
		 (long long)lfunc < (long long)PHYS_TO_XKPHYS(8, 0))
		ufunc = PHYS_TO_XKPHYS(K_CALG_UNCACHED,
				       XKPHYS_TO_PHYS((long long)lfunc));
#endif
	else {
		BUG();
		ufunc = lfunc;
	}

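	/*
	 * Save the cached stack pointer in $16 (s0, callee-saved),
	 * switch to the uncached stack, call the uncached alias of
	 * func and restore the original stack pointer afterwards.
	 * The callee's return value comes back in $2 (v0), i.e. ret.
	 */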
	__asm__ __volatile__ (
		"	move	$16, $sp\n"
		"	move	$sp, %1\n"
		"	jalr	%2\n"
		"	move	$sp, $16"
		: "=r" (ret)
		: "r" (usp), "r" (ufunc)
		: "$16", "$31");

	return ret;
}
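
/*
 * Illustrative usage sketch (the helper name below is hypothetical):
 * run_uncached() is intended for short routines that must not execute
 * through the cache they are manipulating, for example
 *
 *	static void disable_l2(void);	(hypothetical cache helper)
 *	...
 *	run_uncached(&disable_l2);
 *
 * For the duration of the call, both the helper's code and the
 * temporary stack are accessed through their CKSEG1 or XKPHYS(2)
 * aliases.
 */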