/*
 * Copyright (C) 2010 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef SpecializedThunkJIT_h
#define SpecializedThunkJIT_h

#if ENABLE(JIT)

#include "Executable.h"
#include "JSInterfaceJIT.h"
#include "LinkBuffer.h"

namespace JSC {

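    // SpecializedThunkJIT is a small helper JIT used by the thunk generators to emit
    // specialized fast-path thunks for selected host functions. Argument loads record
    // their type-check failures in m_failures; finalize() links every recorded failure
    // to a generic fallback entry point and returns the finished thunk code.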
    class SpecializedThunkJIT : public JSInterfaceJIT {
    public:
        static const int ThisArgument = -1;
        SpecializedThunkJIT(int expectedArgCount)
        {
            // Check that we were passed the expected number of arguments.
            // ArgumentCount in the call frame includes the 'this' value, hence the + 1.
            m_failures.append(branch32(NotEqual, payloadFor(JSStack::ArgumentCount), TrustedImm32(expectedArgCount + 1)));
        }

        void loadDoubleArgument(int argument, FPRegisterID dst, RegisterID scratch)
        {
            unsigned src = CallFrame::argumentOffset(argument);
            m_failures.append(emitLoadDouble(src, dst, scratch));
        }

        void loadCellArgument(int argument, RegisterID dst)
        {
            unsigned src = CallFrame::argumentOffset(argument);
            m_failures.append(emitLoadJSCell(src, dst));
        }

        void loadJSStringArgument(VM& vm, int argument, RegisterID dst)
        {
            loadCellArgument(argument, dst);
            m_failures.append(branchPtr(NotEqual, Address(dst, JSCell::structureOffset()), TrustedImmPtr(vm.stringStructure.get())));
        }

        void loadInt32Argument(int argument, RegisterID dst, Jump& failTarget)
        {
            unsigned src = CallFrame::argumentOffset(argument);
            failTarget = emitLoadInt32(src, dst);
        }

        void loadInt32Argument(int argument, RegisterID dst)
        {
            Jump conversionFailed;
            loadInt32Argument(argument, dst, conversionFailed);
            m_failures.append(conversionFailed);
        }

        void appendFailure(const Jump& failure)
        {
            m_failures.append(failure);
        }

        void returnJSValue(RegisterID src)
        {
            if (src != regT0)
                move(src, regT0);
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        void returnDouble(FPRegisterID src)
        {
#if USE(JSVALUE64)
            // Box the double by offsetting its bit pattern with tagTypeNumberRegister.
            // An all-zero bit pattern (+0.0) is returned as the int32 immediate 0 instead.
            moveDoubleTo64(src, regT0);
            Jump zero = branchTest64(Zero, regT0);
            sub64(tagTypeNumberRegister, regT0);
            Jump done = jump();
            zero.link(this);
            move(tagTypeNumberRegister, regT0);
            done.link(this);
#else
            // Spill the double and reload it as a tag/payload pair. If both halves are
            // zero (+0.0), rewrite the result as the int32 immediate 0.
            storeDouble(src, Address(stackPointerRegister, -(int)sizeof(double)));
            loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.tag) - sizeof(double)), regT1);
            loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.payload) - sizeof(double)), regT0);
            Jump lowNonZero = branchTestPtr(NonZero, regT1);
            Jump highNonZero = branchTestPtr(NonZero, regT0);
            move(TrustedImm32(0), regT0);
            move(TrustedImm32(JSValue::Int32Tag), regT1);
            lowNonZero.link(this);
            highNonZero.link(this);
#endif
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        void returnInt32(RegisterID src)
        {
            if (src != regT0)
                move(src, regT0);
            tagReturnAsInt32();
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        void returnJSCell(RegisterID src)
        {
            if (src != regT0)
                move(src, regT0);
            tagReturnAsJSCell();
            loadPtr(payloadFor(JSStack::CallerFrame, callFrameRegister), callFrameRegister);
            ret();
        }

        MacroAssemblerCodeRef finalize(VM& vm, MacroAssemblerCodePtr fallback, const char* thunkKind)
        {
            LinkBuffer patchBuffer(vm, this, GLOBAL_THUNK_ID);
            patchBuffer.link(m_failures, CodeLocationLabel(fallback));
            for (unsigned i = 0; i < m_calls.size(); i++)
                patchBuffer.link(m_calls[i].first, m_calls[i].second);
            return FINALIZE_CODE(patchBuffer, ("Specialized thunk for %s", thunkKind));
        }

        // Assumes that the target function uses fpRegister0 as the first argument
        // and return value. Like any sensible architecture would.
        void callDoubleToDouble(FunctionPtr function)
        {
            m_calls.append(std::make_pair(call(), function));
        }

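        // On architectures where the return address lives in a link register rather than
        // on the stack, the call below would clobber it, so stash it in regT3 around the call.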
        void callDoubleToDoublePreservingReturn(FunctionPtr function)
        {
            if (!isX86())
                preserveReturnAddressAfterCall(regT3);
            callDoubleToDouble(function);
            if (!isX86())
                restoreReturnAddressBeforeReturn(regT3);
        }

    private:

        void tagReturnAsInt32()
        {
#if USE(JSVALUE64)
            or64(tagTypeNumberRegister, regT0);
#else
            move(TrustedImm32(JSValue::Int32Tag), regT1);
#endif
        }

        void tagReturnAsJSCell()
        {
#if USE(JSVALUE32_64)
            move(TrustedImm32(JSValue::CellTag), regT1);
#endif
        }

        MacroAssembler::JumpList m_failures;
        Vector<std::pair<Call, FunctionPtr> > m_calls;
    };
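
    // Usage sketch (illustrative only, not part of this header's interface): thunk
    // generators in ThunkGenerators.cpp drive this class roughly as follows -- load
    // and type-check the arguments, emit the fast path, tag and return the result,
    // then link all recorded failures to a generic native-call fallback. The
    // generator name and the ctiNativeCall fallback below are examples of that
    // pattern only.
    //
    //     MacroAssemblerCodeRef sqrtThunkGenerator(VM* vm)
    //     {
    //         SpecializedThunkJIT jit(1);
    //         // Bails out to the fallback if argument 0 is not a number.
    //         jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
    //         jit.sqrtDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
    //         jit.returnDouble(SpecializedThunkJIT::fpRegT0);
    //         return jit.finalize(*vm, vm->jitStubs->ctiNativeCall(vm), "sqrt");
    //     }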

}

#endif // ENABLE(JIT)

#endif // SpecializedThunkJIT_h