/*
 * Copyright (C) 2008, 2013 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "Arguments.h"
#include "CodeBlock.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Operations.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include <wtf/StringPrintStream.h>

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

void JIT::emit_op_call_put_result(Instruction* instruction)
{
    int dst = instruction[1].u.operand;
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
}

void JIT::emit_op_ret(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;

    emitLoad(dst, regT1, regT0);
    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emit_op_ret_object_or_this(Instruction* currentInstruction)
{
    unsigned result = currentInstruction[1].u.operand;
    unsigned thisReg = currentInstruction[2].u.operand;

    emitLoad(result, regT1, regT0);
    Jump notJSCell = branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag));
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    Jump notObject = emitJumpIfNotObject(regT2);

    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();

    notJSCell.link(this);
    notObject.link(this);
    emitLoad(thisReg, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, regT2);
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    restoreReturnAddressBeforeReturn(regT2);
    ret();
}

void JIT::emitSlow_op_call(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call, currentInstruction, iter, m_callLinkInfoIndex++);
}
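// Note that the eval variants below use m_callLinkInfoIndex without incrementing it:
// compileCallEval() returns before a StructureStubCompilationInfo entry is appended,
// so an eval call site does not consume a call link info slot.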
void JIT::emitSlow_op_call_eval(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_eval, currentInstruction, iter, m_callLinkInfoIndex);
}

void JIT::emitSlow_op_call_varargs(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_call_varargs, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emitSlow_op_construct(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase(op_construct, currentInstruction, iter, m_callLinkInfoIndex++);
}

void JIT::emit_op_call(Instruction* currentInstruction)
{
    compileOpCall(op_call, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_call_eval(Instruction* currentInstruction)
{
    compileOpCall(op_call_eval, currentInstruction, m_callLinkInfoIndex);
}

void JIT::emit_op_call_varargs(Instruction* currentInstruction)
{
    compileOpCall(op_call_varargs, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::emit_op_construct(Instruction* currentInstruction)
{
    compileOpCall(op_construct, currentInstruction, m_callLinkInfoIndex++);
}

void JIT::compileLoadVarargs(Instruction* instruction)
{
    int thisValue = instruction[2].u.operand;
    int arguments = instruction[3].u.operand;
    int firstFreeRegister = instruction[4].u.operand;

    JumpList slowCase;
    JumpList end;
    bool canOptimize = m_codeBlock->usesArguments()
        && arguments == m_codeBlock->argumentsRegister()
        && !m_codeBlock->symbolTable()->slowArguments();

    if (canOptimize) {
        emitLoadTag(arguments, regT1);
        slowCase.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag)));

        load32(payloadFor(JSStack::ArgumentCount), regT2);
        slowCase.append(branch32(Above, regT2, TrustedImm32(Arguments::MaxArguments + 1)));
        // regT2: argumentCountIncludingThis

        move(regT2, regT3);
        add32(TrustedImm32(firstFreeRegister + JSStack::CallFrameHeaderSize), regT3);
        lshift32(TrustedImm32(3), regT3);
        addPtr(callFrameRegister, regT3);
        // regT3: newCallFrame

        slowCase.append(branchPtr(Below, AbsoluteAddress(m_vm->interpreter->stack().addressOfEnd()), regT3));

        // Initialize ArgumentCount.
        store32(regT2, payloadFor(JSStack::ArgumentCount, regT3));

        // Initialize 'this'.
        emitLoad(thisValue, regT1, regT0);
        store32(regT0, Address(regT3, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        store32(regT1, Address(regT3, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));

        // Copy arguments.
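        // The argument count in regT2 is negated so the loop below can count up towards
        // zero; each iteration copies one Register-sized tag/payload pair from the current
        // frame to the new frame, indexed relative to the 'this' argument slot.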
        neg32(regT2);
        end.append(branchAdd32(Zero, TrustedImm32(1), regT2));
        // regT2: -argumentCount;

        Label copyLoop = label();
        load32(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))), regT0);
        load32(BaseIndex(callFrameRegister, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))), regT1);
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register)))));
        branchAdd32(NonZero, TrustedImm32(1), regT2).linkTo(copyLoop, this);

        end.append(jump());
    }

    if (canOptimize)
        slowCase.link(this);

    JITStubCall stubCall(this, cti_op_load_varargs);
    stubCall.addArgument(thisValue);
    stubCall.addArgument(arguments);
    stubCall.addArgument(Imm32(firstFreeRegister));
    stubCall.call(regT3);

    if (canOptimize)
        end.link(this);
}

void JIT::compileCallEval()
{
    JITStubCall stubCall(this, cti_op_call_eval); // Initializes ScopeChain; ReturnPC; CodeBlock.
    stubCall.call();
    addSlowCase(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));
    emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, callFrameRegister);

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileCallEvalSlowCase(Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    emitLoad(JSStack::Callee, regT1, regT0);
    emitNakedCall(m_vm->getCTIStub(virtualCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCall(OpcodeID opcodeID, Instruction* instruction, unsigned callLinkInfoIndex)
{
    int callee = instruction[1].u.operand;

    /* Caller always:
        - Updates callFrameRegister to callee callFrame.
        - Initializes ArgumentCount; CallerFrame; Callee.
       For a JS call:
        - Caller initializes ScopeChain.
        - Callee initializes ReturnPC; CodeBlock.
        - Callee restores callFrameRegister before return.
       For a non-JS call:
        - Caller initializes ScopeChain; ReturnPC; CodeBlock.
        - Caller restores callFrameRegister after return.
    */

    if (opcodeID == op_call_varargs)
        compileLoadVarargs(instruction);
    else {
        int argCount = instruction[2].u.operand;
        int registerOffset = instruction[3].u.operand;

        if (opcodeID == op_call && shouldEmitProfiling()) {
            emitLoad(registerOffset + CallFrame::argumentOffsetIncludingThis(0), regT0, regT1);
            Jump done = branch32(NotEqual, regT0, TrustedImm32(JSValue::CellTag));
            loadPtr(Address(regT1, JSCell::structureOffset()), regT1);
            storePtr(regT1, instruction[5].u.arrayProfile->addressOfLastSeenStructure());
            done.link(this);
        }

        addPtr(TrustedImm32(registerOffset * sizeof(Register)), callFrameRegister, regT3);

        store32(TrustedImm32(argCount), payloadFor(JSStack::ArgumentCount, regT3));
    } // regT3 holds newCallFrame with ArgumentCount initialized.

    storePtr(TrustedImmPtr(instruction), tagFor(JSStack::ArgumentCount, callFrameRegister));
    emitLoad(callee, regT1, regT0); // regT1, regT0 holds callee.
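    // Store the caller's frame pointer and the callee cell into the new frame's header,
    // then switch callFrameRegister over to the callee frame.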
    storePtr(callFrameRegister, Address(regT3, JSStack::CallerFrame * static_cast<int>(sizeof(Register))));
    emitStore(JSStack::Callee, regT1, regT0, regT3);
    move(regT3, callFrameRegister);

    if (opcodeID == op_call_eval) {
        compileCallEval();
        return;
    }

    addSlowCase(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));

    DataLabelPtr addressOfLinkedFunctionCheck;
    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceOpCall);
    Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(0));
    END_UNINTERRUPTED_SEQUENCE(sequenceOpCall);

    addSlowCase(slowCase);

    ASSERT(m_callStructureStubCompilationInfo.size() == callLinkInfoIndex);
    m_callStructureStubCompilationInfo.append(StructureStubCompilationInfo());
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callType = CallLinkInfo::callTypeFor(opcodeID);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].bytecodeIndex = m_bytecodeOffset;

    loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_scope)), regT1);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);
    m_callStructureStubCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();

    sampleCodeBlock(m_codeBlock);
}

void JIT::compileOpCallSlowCase(OpcodeID opcodeID, Instruction*, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    if (opcodeID == op_call_eval) {
        compileCallEvalSlowCase(iter);
        return;
    }

    linkSlowCase(iter);
    linkSlowCase(iter);
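    // The two slow cases linked here correspond to the two addSlowCase() calls in
    // compileOpCall(): the callee-is-not-a-cell check and the patchable linked-function
    // check. Both fall through to the call link stub below.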
    m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(opcodeID == op_construct ? m_vm->getCTIStub(linkConstructGenerator).code() : m_vm->getCTIStub(linkCallGenerator).code());

    sampleCodeBlock(m_codeBlock);
}

void JIT::privateCompileClosureCall(CallLinkInfo* callLinkInfo, CodeBlock* calleeCodeBlock, Structure* expectedStructure, ExecutableBase* expectedExecutable, MacroAssemblerCodePtr codePtr)
{
    JumpList slowCases;

    slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(expectedStructure)));
    slowCases.append(branchPtr(NotEqual, Address(regT0, JSFunction::offsetOfExecutable()), TrustedImmPtr(expectedExecutable)));

    loadPtr(Address(regT0, JSFunction::offsetOfScopeChain()), regT1);
    emitPutCellToCallFrameHeader(regT1, JSStack::ScopeChain);

    Call call = nearCall();
    Jump done = jump();

    slowCases.link(this);
    move(TrustedImmPtr(callLinkInfo->callReturnLocation.executableAddress()), regT2);
    restoreReturnAddressBeforeReturn(regT2);
    Jump slow = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    patchBuffer.link(done, callLinkInfo->hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(m_vm->getCTIStub(virtualCallGenerator).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline closure call stub for %s, return point %p, target %p (%s)",
                toCString(*m_codeBlock).data(),
                callLinkInfo->hotPathOther.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(),
                toCString(pointerDump(calleeCodeBlock)).data())),
        *m_vm, m_codeBlock->ownerExecutable(), expectedStructure, expectedExecutable,
        callLinkInfo->codeOrigin));

    RepatchBuffer repatchBuffer(m_codeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo->hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    repatchBuffer.relink(callLinkInfo->callReturnLocation, m_vm->getCTIStub(virtualCallGenerator).code());

    callLinkInfo->stub = stubRoutine.release();
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)