/*
 * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "AccessorCallJITStubRoutine.h"
#include "CCallHelpers.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "JIT.h"
#include "JITInlines.h"
#include "JSCInlines.h"
#include "LinkBuffer.h"
#include "PolymorphicGetByIdList.h"
#include "PolymorphicPutByIdList.h"
#include "RegExpMatchesArray.h"
#include "RepatchBuffer.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "ThunkGenerators.h"
#include <wtf/StringPrintStream.h>

namespace JSC {

// Beware: in this code, it is not safe to assume anything about the following registers
// that would ordinarily have well-known values:
// - tagTypeNumberRegister
// - tagMaskRegister

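// In the FTL, slow path calls are routed through register-preserving thunks, so the raw
// call target read out of the instruction stream is the thunk, not the callee. Map the
// thunk back to the real call target via its SlowPathCallKey.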
static FunctionPtr readCallTarget(RepatchBuffer& repatchBuffer, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(repatchBuffer);
#endif // ENABLE(FTL_JIT)
    return result;
}

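// The inverse of the mapping above: to repatch a call in FTL code, rebuild the thunk
// key with the new call target and relink to a thunk generated for that key.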
static void repatchCall(RepatchBuffer& repatchBuffer, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    CodeBlock* codeBlock = repatchBuffer.codeBlock();
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#endif // ENABLE(FTL_JIT)
    repatchBuffer.relink(call, newCalleeFunction);
}

static void repatchCall(CodeBlock* codeblock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
    RepatchBuffer repatchBuffer(codeblock);
    repatchCall(repatchBuffer, call, newCalleeFunction);
}

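// Patch a self-access inline cache in place: retarget the slow path call, patch the
// structure check immediate, and patch the offset of the inline load or store itself.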
static void repatchByIdSelfAccess(VM& vm, CodeBlock* codeBlock, StructureStubInfo& stubInfo, Structure* structure, const Identifier& propertyName, PropertyOffset offset,
    const FunctionPtr& slowPathFunction, bool compact)
{
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm.registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    RepatchBuffer repatchBuffer(codeBlock);

    // Only optimize once!
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, slowPathFunction);

    // Patch the structure check & the offset of the load.
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall), bitwise_cast<int32_t>(structure->id()));
    repatchBuffer.setLoadInstructionIsActive(stubInfo.callReturnLocation.convertibleLoadAtOffset(stubInfo.patch.deltaCallToStorageLoad), isOutOfLineOffset(offset));
#if USE(JSVALUE64)
    if (compact)
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
    else
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), offsetRelativeToPatchedStorage(offset));
#elif USE(JSVALUE32_64)
    if (compact) {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    } else {
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag));
        repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), offsetRelativeToPatchedStorage(offset) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload));
    }
#endif
}

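// Emit a check that the object still has the given structure. When the structure's
// transition watchpoint set is still valid, we watch it instead of emitting a runtime
// check; debug builds then assert that the watchpoint really was sufficient.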
static void addStructureTransitionCheck(
    JSCell* object, Structure* structure, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
        structure->addTransitionWatchpoint(stubInfo.addWatchpoint(codeBlock));
        if (!ASSERT_DISABLED) {
            // If we execute this code, the object must have the structure we expect. Assert
            // this in debug modes.
            jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
            MacroAssembler::Jump ok = branchStructure(
                jit,
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
                structure);
            jit.abortWithReason(RepatchIneffectiveWatchpoint);
            ok.link(&jit);
        }
        return;
    }

    jit.move(MacroAssembler::TrustedImmPtr(object), scratchGPR);
    failureCases.append(
        branchStructure(jit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
}

static void addStructureTransitionCheck(
    JSValue prototype, CodeBlock* codeBlock, StructureStubInfo& stubInfo,
    MacroAssembler& jit, MacroAssembler::JumpList& failureCases, GPRReg scratchGPR)
{
    if (prototype.isNull())
        return;

    ASSERT(prototype.isCell());

    addStructureTransitionCheck(
        prototype.asCell(), prototype.asCell()->structure(), codeBlock, stubInfo, jit,
        failureCases, scratchGPR);
}

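// Route the inline cache to a stub. Where the ISA allows it, the patchable structure
// check branch is itself overwritten with a jump to the stub; otherwise we relink the
// separate patchable jump that was emitted for this purpose.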
static void replaceWithJump(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo, const MacroAssemblerCodePtr target)
{
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.replaceWithJump(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(
                stubInfo.callReturnLocation.dataLabel32AtOffset(
                    -(intptr_t)stubInfo.patch.deltaCheckImmToCall)),
            CodeLocationLabel(target));
        return;
    }

    repatchBuffer.relink(
        stubInfo.callReturnLocation.jumpAtOffset(
            stubInfo.patch.deltaCallToJump),
        CodeLocationLabel(target));
}

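// If a scratch register had to be spilled with pushToSave(), both the success and the
// failure exits must pop it back before leaving the stub.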
static void emitRestoreScratch(MacroAssembler& stubJit, bool needToRestoreScratch, GPRReg scratchGPR, MacroAssembler::Jump& success, MacroAssembler::Jump& fail, MacroAssembler::JumpList failureCases)
{
    if (needToRestoreScratch) {
        stubJit.popToRestore(scratchGPR);

        success = stubJit.jump();

        // Link failure cases here, so we can pop scratchGPR and then jump back.
        failureCases.link(&stubJit);

        stubJit.popToRestore(scratchGPR);

        fail = stubJit.jump();
        return;
    }

    success = stubJit.jump();
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases, CodeLocationLabel successLabel, CodeLocationLabel slowCaseBegin)
{
    patchBuffer.link(success, successLabel);

    if (needToRestoreScratch) {
        patchBuffer.link(fail, slowCaseBegin);
        return;
    }

    // Link failure cases directly back to the normal path.
    patchBuffer.link(failureCases, slowCaseBegin);
}

static void linkRestoreScratch(LinkBuffer& patchBuffer, bool needToRestoreScratch, StructureStubInfo& stubInfo, MacroAssembler::Jump success, MacroAssembler::Jump fail, MacroAssembler::JumpList failureCases)
{
    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

enum ByIdStubKind {
    GetValue,
    CallGetter,
    CallCustomGetter,
    CallSetter,
    CallCustomSetter
};

static const char* toString(ByIdStubKind kind)
{
    switch (kind) {
    case GetValue:
        return "GetValue";
    case CallGetter:
        return "CallGetter";
    case CallCustomGetter:
        return "CallCustomGetter";
    case CallSetter:
        return "CallSetter";
    case CallCustomSetter:
        return "CallCustomSetter";
    default:
        RELEASE_ASSERT_NOT_REACHED();
        return nullptr;
    }
}

static ByIdStubKind kindFor(const PropertySlot& slot)
{
    if (slot.isCacheableValue())
        return GetValue;
    if (slot.isCacheableCustom())
        return CallCustomGetter;
    RELEASE_ASSERT(slot.isCacheableGetter());
    return CallGetter;
}

static FunctionPtr customFor(const PropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customGetter());
}

static ByIdStubKind kindFor(const PutPropertySlot& slot)
{
    RELEASE_ASSERT(!slot.isCacheablePut());
    if (slot.isCacheableSetter())
        return CallSetter;
    RELEASE_ASSERT(slot.isCacheableCustom());
    return CallCustomSetter;
}

static FunctionPtr customFor(const PutPropertySlot& slot)
{
    if (!slot.isCacheableCustom())
        return FunctionPtr();
    return FunctionPtr(slot.customSetter());
}

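// Generate a stub for a by-id access: check the structure (possibly through a proxy),
// emit transition checks along the prototype chain if there is one, load the property,
// and, for accessors, either make an inline-cached JS call or call the custom C function.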
static void generateByIdStub(
    ExecState* exec, ByIdStubKind kind, const Identifier& propertyName,
    FunctionPtr custom, StructureStubInfo& stubInfo, StructureChain* chain, size_t count,
    PropertyOffset offset, Structure* structure, bool loadTargetFromProxy, WatchpointSet* watchpointSet,
    CodeLocationLabel successLabel, CodeLocationLabel slowCaseLabel, RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
    JSValueRegs valueRegs = JSValueRegs(
#if USE(JSVALUE32_64)
        static_cast<GPRReg>(stubInfo.patch.valueTagGPR),
#endif
        static_cast<GPRReg>(stubInfo.patch.valueGPR));
    GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
    bool needToRestoreScratch = scratchGPR == InvalidGPRReg;
    RELEASE_ASSERT(!needToRestoreScratch || kind == GetValue);

    CCallHelpers stubJit(&exec->vm(), exec->codeBlock());
    if (needToRestoreScratch) {
        scratchGPR = AssemblyHelpers::selectScratchGPR(
            baseGPR, valueRegs.tagGPR(), valueRegs.payloadGPR());
        stubJit.pushToSave(scratchGPR);
    }

    MacroAssembler::JumpList failureCases;

    GPRReg baseForGetGPR;
    if (loadTargetFromProxy) {
        baseForGetGPR = valueRegs.payloadGPR();
        failureCases.append(stubJit.branch8(
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()),
            MacroAssembler::TrustedImm32(PureForwardingProxyType)));

        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), scratchGPR);

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(scratchGPR, JSCell::structureIDOffset()),
            structure));
    } else {
        baseForGetGPR = baseGPR;

        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseForGetGPR, JSCell::structureIDOffset()),
            structure));
    }

    CodeBlock* codeBlock = exec->codeBlock();
    if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
        vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));

    if (watchpointSet)
        watchpointSet->add(stubInfo.addWatchpoint(codeBlock));

    Structure* currStructure = structure;
    JSObject* protoObject = 0;
    if (chain) {
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            protoObject = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = protoObject->structure();
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(propertyName, stubInfo.addWatchpoint(codeBlock));
            addStructureTransitionCheck(
                protoObject, protoStructure, codeBlock, stubInfo, stubJit,
                failureCases, scratchGPR);
            currStructure = it->get();
        }
    }

    GPRReg baseForAccessGPR;
    if (chain) {
        // We could have clobbered scratchGPR earlier, so we have to reload from baseGPR to get the target.
        if (loadTargetFromProxy)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSProxy::targetOffset()), baseForGetGPR);
        stubJit.move(MacroAssembler::TrustedImmPtr(protoObject), scratchGPR);
        baseForAccessGPR = scratchGPR;
    } else {
        // For proxy objects, we need to do all the Structure checks before moving the baseGPR into
        // baseForGetGPR because if we fail any of the checks then we would have the wrong value in baseGPR
        // on the slow path.
        if (loadTargetFromProxy)
            stubJit.move(scratchGPR, baseForGetGPR);
        baseForAccessGPR = baseForGetGPR;
    }

    GPRReg loadedValueGPR = InvalidGPRReg;
    if (kind != CallCustomGetter && kind != CallCustomSetter) {
        if (kind == GetValue)
            loadedValueGPR = valueRegs.payloadGPR();
        else
            loadedValueGPR = scratchGPR;

        GPRReg storageGPR;
        if (isInlineOffset(offset))
            storageGPR = baseForAccessGPR;
        else {
            stubJit.loadPtr(MacroAssembler::Address(baseForAccessGPR, JSObject::butterflyOffset()), loadedValueGPR);
            storageGPR = loadedValueGPR;
        }

#if USE(JSVALUE64)
        stubJit.load64(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset)), loadedValueGPR);
#else
        if (kind == GetValue)
            stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + TagOffset), valueRegs.tagGPR());
        stubJit.load32(MacroAssembler::Address(storageGPR, offsetRelativeToBase(offset) + PayloadOffset), loadedValueGPR);
#endif
    }

    // Stuff for custom getters.
    MacroAssembler::Call operationCall;
    MacroAssembler::Call handlerCall;

    // Stuff for JS getters.
    MacroAssembler::DataLabelPtr addressOfLinkFunctionCheck;
    MacroAssembler::Call fastPathCall;
    MacroAssembler::Call slowPathCall;
    std::unique_ptr<CallLinkInfo> callLinkInfo;

    MacroAssembler::Jump success, fail;
    if (kind != GetValue) {
        // Need to make sure that whenever this call is made in the future, we remember the
        // place that we made it from. It just so happens to be the place that we are at
        // right now!
        stubJit.store32(MacroAssembler::TrustedImm32(exec->locationAsRawBits()),
            CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));

        if (kind == CallGetter || kind == CallSetter) {
            // Create a JS call using a JS call inline cache. Assume that:
            //
            // - SP is aligned and represents the extent of the calling compiler's stack usage.
            //
            // - FP is set correctly (i.e. it points to the caller's call frame header).
            //
            // - SP - FP is an aligned difference.
            //
            // - Any byte between FP (exclusive) and SP (inclusive) could be live in the calling
            //   code.
            //
            // Therefore, we temporarily grow the stack for the purpose of the call and then
            // shrink it after.

            callLinkInfo = std::make_unique<CallLinkInfo>();
            callLinkInfo->callType = CallLinkInfo::Call;
            callLinkInfo->codeOrigin = stubInfo.codeOrigin;
            callLinkInfo->calleeGPR = loadedValueGPR;

            MacroAssembler::JumpList done;

            // There is a 'this' argument but nothing else.
            unsigned numberOfParameters = 1;
            // ... unless we're calling a setter.
            if (kind == CallSetter)
                numberOfParameters++;

            // Get the accessor; if there ain't one then the result is jsUndefined().
            if (kind == CallSetter) {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfSetter()),
                    loadedValueGPR);
            } else {
                stubJit.loadPtr(
                    MacroAssembler::Address(loadedValueGPR, GetterSetter::offsetOfGetter()),
                    loadedValueGPR);
            }
            MacroAssembler::Jump returnUndefined = stubJit.branchTestPtr(
                MacroAssembler::Zero, loadedValueGPR);

            unsigned numberOfRegsForCall =
                JSStack::CallFrameHeaderSize + numberOfParameters;

            unsigned numberOfBytesForCall =
                numberOfRegsForCall * sizeof(Register) - sizeof(CallerFrameAndPC);

            unsigned alignedNumberOfBytesForCall =
                WTF::roundUpToMultipleOf(stackAlignmentBytes(), numberOfBytesForCall);

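            // Worked example with illustrative 64-bit numbers (assuming
            // JSStack::CallFrameHeaderSize == 6 slots, sizeof(Register) == 8,
            // sizeof(CallerFrameAndPC) == 16, and 16-byte stack alignment): a getter
            // call passes only 'this', so it needs 6 + 1 = 7 slots = 56 bytes; the
            // call instruction itself accounts for the CallerFrameAndPC, leaving
            // 56 - 16 = 40 bytes, which rounds up to 48.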
            stubJit.subPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);

            MacroAssembler::Address calleeFrame = MacroAssembler::Address(
                MacroAssembler::stackPointerRegister,
                -static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC)));

            stubJit.store32(
                MacroAssembler::TrustedImm32(numberOfParameters),
                calleeFrame.withOffset(
                    JSStack::ArgumentCount * sizeof(Register) + PayloadOffset));

            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::Callee * sizeof(Register)));

            stubJit.storeCell(
                baseForGetGPR,
                calleeFrame.withOffset(
                    virtualRegisterForArgument(0).offset() * sizeof(Register)));

            if (kind == CallSetter) {
                stubJit.storeValue(
                    valueRegs,
                    calleeFrame.withOffset(
                        virtualRegisterForArgument(1).offset() * sizeof(Register)));
            }

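            // This is the JS call inline cache proper: compare the callee against a
            // patchable pointer that starts out null, so the first execution always
            // takes the slow path, which links the call and repatches the pointer.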
            MacroAssembler::Jump slowCase = stubJit.branchPtrWithPatch(
                MacroAssembler::NotEqual, loadedValueGPR, addressOfLinkFunctionCheck,
                MacroAssembler::TrustedImmPtr(0));

            // loadedValueGPR is already burned. We can reuse it. From here on we assume that
            // any volatile register will be clobbered anyway.
            stubJit.loadPtr(
                MacroAssembler::Address(loadedValueGPR, JSFunction::offsetOfScopeChain()),
                loadedValueGPR);
            stubJit.storeCell(
                loadedValueGPR, calleeFrame.withOffset(JSStack::ScopeChain * sizeof(Register)));
            fastPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            slowCase.link(&stubJit);

            stubJit.move(loadedValueGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
            stubJit.move(MacroAssembler::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
            stubJit.move(MacroAssembler::TrustedImmPtr(callLinkInfo.get()), GPRInfo::regT2);
            slowPathCall = stubJit.nearCall();

            stubJit.addPtr(
                MacroAssembler::TrustedImm32(alignedNumberOfBytesForCall),
                MacroAssembler::stackPointerRegister);
            if (kind == CallGetter)
                stubJit.setupResults(valueRegs);

            done.append(stubJit.jump());
            returnUndefined.link(&stubJit);

            if (kind == CallGetter)
                stubJit.moveTrustedValue(jsUndefined(), valueRegs);

            done.link(&stubJit);
        } else {
            // getter: EncodedJSValue (*GetValueFunc)(ExecState*, JSObject* slotBase, EncodedJSValue thisValue, PropertyName);
            // setter: void (*PutValueFunc)(ExecState*, JSObject* base, EncodedJSValue thisObject, EncodedJSValue value);
#if USE(JSVALUE64)
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, valueRegs.gpr());
#else
            if (kind == CallCustomGetter)
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), MacroAssembler::TrustedImmPtr(propertyName.impl()));
            else
                stubJit.setupArgumentsWithExecState(baseForAccessGPR, baseForGetGPR, MacroAssembler::TrustedImm32(JSValue::CellTag), valueRegs.payloadGPR(), valueRegs.tagGPR());
#endif
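            // Record the caller's frame so the C++ runtime can unwind and throw from
            // inside the custom getter/setter.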
            stubJit.storePtr(GPRInfo::callFrameRegister, &vm->topCallFrame);

            operationCall = stubJit.call();
            if (kind == CallCustomGetter)
                stubJit.setupResults(valueRegs);
            MacroAssembler::Jump noException = stubJit.emitExceptionCheck(CCallHelpers::InvertedExceptionCheck);

            stubJit.setupArguments(CCallHelpers::TrustedImmPtr(vm), GPRInfo::callFrameRegister);
            handlerCall = stubJit.call();
            stubJit.jumpToExceptionHandler();

            noException.link(&stubJit);
        }
    }
    emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());

    linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);
    if (kind == CallCustomGetter || kind == CallCustomSetter) {
        patchBuffer.link(operationCall, custom);
        patchBuffer.link(handlerCall, lookupExceptionHandler);
    } else if (kind == CallGetter || kind == CallSetter) {
        callLinkInfo->hotPathOther = patchBuffer.locationOfNearCall(fastPathCall);
        callLinkInfo->hotPathBegin = patchBuffer.locationOf(addressOfLinkFunctionCheck);
        callLinkInfo->callReturnLocation = patchBuffer.locationOfNearCall(slowPathCall);

        ThunkGenerator generator = linkThunkGeneratorFor(
            CodeForCall, RegisterPreservationNotRequired);
        patchBuffer.link(
            slowPathCall, CodeLocationLabel(vm->getCTIStub(generator).code()));
    }

    MacroAssemblerCodeRef code = FINALIZE_CODE_FOR(
        exec->codeBlock(), patchBuffer,
        ("%s access stub for %s, return point %p",
            toString(kind), toCString(*exec->codeBlock()).data(),
            successLabel.executableAddress()));

    if (kind == CallGetter || kind == CallSetter)
        stubRoutine = adoptRef(new AccessorCallJITStubRoutine(code, *vm, WTF::move(callLinkInfo)));
    else
        stubRoutine = createJITStubRoutine(code, *vm, codeBlock->ownerExecutable(), true);
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

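// Decide whether a cell is currently cacheable at all. Uncacheable dictionaries get
// flattened once (flattening can move property offsets, hence RetryCacheLater); if
// that already happened before, give up.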
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }
    ASSERT(!structure->isUncacheableDictionary());

    if (typeInfo.hasImpureGetOwnPropertySlot() && !typeInfo.newImpurePropertyFiresWatchpoints())
        return GiveUpOnCache;

    return AttemptToCache;
}

static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    // FIXME: Write a test that proves we need to check for recursion here just
    // like the interpreter does, then add a check for recursion.

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if ((isJSArray(baseValue) || isRegExpMatchesArray(baseValue) || isJSString(baseValue)) && propertyName == exec->propertyNames().length) {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
        GPRReg resultTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

        MacroAssembler stubJit;

        if (isJSArray(baseValue) || isRegExpMatchesArray(baseValue)) {
            GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();
            bool needToRestoreScratch = false;

            if (scratchGPR == InvalidGPRReg) {
#if USE(JSVALUE64)
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
#else
                scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR, resultTagGPR);
#endif
                stubJit.pushToSave(scratchGPR);
                needToRestoreScratch = true;
            }

            MacroAssembler::JumpList failureCases;

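            // The fast path is only valid for a real array with a usable indexing
            // shape whose public length fits in an int32: the IsArray bit must be set,
            // the shape bits must be nonzero, and a length >= 2^31 cannot be boxed as
            // an Int32 JSValue, so it takes the slow path.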
            stubJit.load8(MacroAssembler::Address(baseGPR, JSCell::indexingTypeOffset()), scratchGPR);
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IsArray)));
            failureCases.append(stubJit.branchTest32(MacroAssembler::Zero, scratchGPR, MacroAssembler::TrustedImm32(IndexingShapeMask)));

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
            stubJit.load32(MacroAssembler::Address(scratchGPR, ArrayStorage::lengthOffset()), scratchGPR);
            failureCases.append(stubJit.branch32(MacroAssembler::LessThan, scratchGPR, MacroAssembler::TrustedImm32(0)));

            stubJit.move(scratchGPR, resultGPR);
#if USE(JSVALUE64)
            stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
            stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

            MacroAssembler::Jump success, fail;

            emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

            LinkBuffer patchBuffer(*vm, stubJit, codeBlock);

            linkRestoreScratch(patchBuffer, needToRestoreScratch, stubInfo, success, fail, failureCases);

            stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
                exec->codeBlock(), patchBuffer,
                ("GetById array length stub for %s, return point %p",
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress()));

            RepatchBuffer repatchBuffer(codeBlock);
            replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

            return RetryCacheLater;
        }

        // String.length case
        MacroAssembler::Jump failure = stubJit.branch8(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::typeInfoTypeOffset()), MacroAssembler::TrustedImm32(StringType));

        stubJit.load32(MacroAssembler::Address(baseGPR, JSString::offsetOfLength()), resultGPR);

#if USE(JSVALUE64)
        stubJit.or64(AssemblyHelpers::TrustedImm64(TagTypeNumber), resultGPR);
#elif USE(JSVALUE32_64)
        stubJit.move(AssemblyHelpers::TrustedImm32(0xffffffff), resultTagGPR); // JSValue::Int32Tag
#endif

        MacroAssembler::Jump success = stubJit.jump();

        LinkBuffer patchBuffer(*vm, stubJit, codeBlock);

        patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
        patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));

        stubInfo.stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("GetById string length stub for %s, return point %p",
                toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                    stubInfo.patch.deltaCallToDone).executableAddress()));

        RepatchBuffer repatchBuffer(codeBlock);
        replaceWithJump(repatchBuffer, stubInfo, stubInfo.stubRoutine->code().code());
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetById);

        return RetryCacheLater;
    }

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    if (!slot.isCacheable())
        return GiveUpOnCache;

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    // Optimize self access.
    if (slot.slotBase() == baseValue
        && slot.isCacheableValue()
        && !slot.watchpointSet()
        && MacroAssembler::isCompactPtrAlignedAddressOffset(maxOffsetRelativeToPatchedStorage(slot.cachedOffset()))) {
        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, propertyName, slot.cachedOffset(), operationGetByIdBuildList, true);
        stubInfo.initGetByIdSelf(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }

    repatchCall(codeBlock, stubInfo.callReturnLocation, operationGetByIdBuildList);
    return RetryCacheLater;
}

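// Public entry point: attempt to cache, and if caching is hopeless, relink the slow
// path call to the non-optimizing operation so we stop trying.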
void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

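// If the inline cache did self patching, its inline structure check and load are still
// live, so only the miss jump is relinked to point at the stub; otherwise the structure
// check itself is replaced with an unconditional jump to the stub.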
static void patchJumpToGetByIdStub(CodeBlock* codeBlock, StructureStubInfo& stubInfo, JITStubRoutine* stubRoutine)
{
    RELEASE_ASSERT(stubInfo.accessType == access_get_by_id_list);
    RepatchBuffer repatchBuffer(codeBlock);
    if (stubInfo.u.getByIdList.list->didSelfPatching()) {
        repatchBuffer.relink(
            stubInfo.callReturnLocation.jumpAtOffset(
                stubInfo.patch.deltaCallToJump),
            CodeLocationLabel(stubRoutine->code().code()));
        return;
    }

    replaceWithJump(repatchBuffer, stubInfo, stubRoutine->code().code());
}

static InlineCacheAction tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& ident, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (!baseValue.isCell()
        || !slot.isCacheable())
        return GiveUpOnCache;

    JSCell* baseCell = baseValue.asCell();
    bool loadTargetFromProxy = false;
    if (baseCell->type() == PureForwardingProxyType) {
        baseValue = jsCast<JSProxy*>(baseCell)->target();
        baseCell = baseValue.asCell();
        loadTargetFromProxy = true;
    }

    VM* vm = &exec->vm();
    CodeBlock* codeBlock = exec->codeBlock();

    InlineCacheAction action = actionForCell(*vm, baseCell);
    if (action != AttemptToCache)
        return action;

    Structure* structure = baseCell->structure(*vm);
    TypeInfo typeInfo = structure->typeInfo();

    if (stubInfo.patch.spillMode == NeedToSpill) {
        // We cannot do as much inline caching if the registers were not flushed prior to this GetById. In particular,
        // non-Value cached properties require planting calls, which requires registers to have been flushed. Thus,
        // if registers were not flushed, don't do non-Value caching.
        if (!slot.isCacheableValue())
            return GiveUpOnCache;
    }

    PropertyOffset offset = slot.cachedOffset();
    StructureChain* prototypeChain = 0;
    size_t count = 0;

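    // For an access that lands on a prototype, count and sanity-check the chain;
    // normalizePrototypeChainForChainAccess() returns InvalidPrototypeChain when the
    // access cannot be cached.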
    if (slot.slotBase() != baseValue) {
        if (typeInfo.prohibitsPropertyCaching() || structure->isDictionary())
            return GiveUpOnCache;

        count = normalizePrototypeChainForChainAccess(
            exec, baseValue, slot.slotBase(), ident, offset);
        if (count == InvalidPrototypeChain)
            return GiveUpOnCache;
        prototypeChain = structure->prototypeChain(exec);
    }

    PolymorphicGetByIdList* list = PolymorphicGetByIdList::from(stubInfo);
    if (list->isFull()) {
        // We need this extra check because of recursion.
        return GiveUpOnCache;
    }

    RefPtr<JITStubRoutine> stubRoutine;
    generateByIdStub(
        exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count, offset,
        structure, loadTargetFromProxy, slot.watchpointSet(),
        stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
        CodeLocationLabel(list->currentSlowPathTarget(stubInfo)), stubRoutine);

    GetByIdAccess::AccessType accessType;
    if (slot.isCacheableValue())
        accessType = slot.watchpointSet() ? GetByIdAccess::WatchedStub : GetByIdAccess::SimpleStub;
    else if (slot.isCacheableGetter())
        accessType = GetByIdAccess::Getter;
    else
        accessType = GetByIdAccess::CustomGetter;

    list->addAccess(GetByIdAccess(
        *vm, codeBlock->ownerExecutable(), accessType, stubRoutine, structure,
        prototypeChain, count));

    patchJumpToGetByIdStub(codeBlock, stubInfo, stubRoutine.get());

    return list->isFull() ? GiveUpOnCache : RetryCacheLater;
}

void buildGetByIDList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryBuildGetByIDList(exec, baseValue, propertyName, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateListBuildingPutByIdFunction(const PutPropertySlot& slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictBuildList;
        return operationPutByIdStrictBuildList;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictBuildList;
    return operationPutByIdNonStrictBuildList;
}

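// Emit a stub for replacing the value of an existing property: a structure check
// followed by a store to inline or out-of-line storage.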
static void emitPutReplaceStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind,
    Structure* structure,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();
    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();

    CCallHelpers stubJit(vm, exec->codeBlock());

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::Jump badStructure = branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        structure);

#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        badStructure.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else {
        success = stubJit.jump();
        failure = badStructure;
    }

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    patchBuffer.link(failure, failureLabel);

    stubRoutine = FINALIZE_CODE_FOR_STUB(
        exec->codeBlock(), patchBuffer,
        ("PutById replace stub for %s, return point %p",
            toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                stubInfo.patch.deltaCallToDone).executableAddress()));
}

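// Emit a stub for a put that transitions the structure: check the old structure (plus
// transition checks along the prototype chain for non-direct puts), reallocate the
// out-of-line storage if the capacity changed, store the new structure ID, and store
// the value. If inline allocation fails, a slow path call reallocates and finishes
// the put.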
static void emitPutTransitionStub(
    ExecState* exec,
    JSValue,
    const Identifier&,
    const PutPropertySlot& slot,
    StructureStubInfo& stubInfo,
    PutKind putKind,
    Structure* structure,
    Structure* oldStructure,
    StructureChain* prototypeChain,
    CodeLocationLabel failureLabel,
    RefPtr<JITStubRoutine>& stubRoutine)
{
    VM* vm = &exec->vm();

    GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
#if USE(JSVALUE32_64)
    GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.patch.valueTagGPR);
#endif
    GPRReg valueGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);

    ScratchRegisterAllocator allocator(stubInfo.patch.usedRegisters);
    allocator.lock(baseGPR);
#if USE(JSVALUE32_64)
    allocator.lock(valueTagGPR);
#endif
    allocator.lock(valueGPR);

    CCallHelpers stubJit(vm);

    bool needThirdScratch = false;
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()
        && oldStructure->outOfLineCapacity()) {
        needThirdScratch = true;
    }

    GPRReg scratchGPR1 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR1 != baseGPR);
    ASSERT(scratchGPR1 != valueGPR);

    GPRReg scratchGPR2 = allocator.allocateScratchGPR();
    ASSERT(scratchGPR2 != baseGPR);
    ASSERT(scratchGPR2 != valueGPR);
    ASSERT(scratchGPR2 != scratchGPR1);

    GPRReg scratchGPR3;
    if (needThirdScratch) {
        scratchGPR3 = allocator.allocateScratchGPR();
        ASSERT(scratchGPR3 != baseGPR);
        ASSERT(scratchGPR3 != valueGPR);
        ASSERT(scratchGPR3 != scratchGPR1);
        ASSERT(scratchGPR3 != scratchGPR2);
    } else
        scratchGPR3 = InvalidGPRReg;

    allocator.preserveReusedRegistersByPushing(stubJit);

    MacroAssembler::JumpList failureCases;

    ASSERT(oldStructure->transitionWatchpointSetHasBeenInvalidated());

    failureCases.append(branchStructure(stubJit,
        MacroAssembler::NotEqual,
        MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
        oldStructure));

    addStructureTransitionCheck(
        oldStructure->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
        scratchGPR1);

    if (putKind == NotDirect) {
        for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it) {
            addStructureTransitionCheck(
                (*it)->storedPrototype(), exec->codeBlock(), stubInfo, stubJit, failureCases,
                scratchGPR1);
        }
    }

    MacroAssembler::JumpList slowPath;

    bool scratchGPR1HasStorage = false;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        size_t newSize = structure->outOfLineCapacity() * sizeof(JSValue);
        CopiedAllocator* copiedAllocator = &vm->heap.storageAllocator();

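        // Bump-allocate newSize bytes straight out of the copied space:
        // m_currentRemaining counts down, so after the subtraction the new block ends
        // at m_currentPayloadEnd minus the remaining byte count. If the subtraction
        // goes negative, the block doesn't fit and we take the slow path.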
        if (!oldStructure->outOfLineCapacity()) {
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
        } else {
            size_t oldSize = oldStructure->outOfLineCapacity() * sizeof(JSValue);
            ASSERT(newSize > oldSize);

            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR3);
            stubJit.loadPtr(&copiedAllocator->m_currentRemaining, scratchGPR1);
            slowPath.append(stubJit.branchSubPtr(MacroAssembler::Signed, MacroAssembler::TrustedImm32(newSize), scratchGPR1));
            stubJit.storePtr(scratchGPR1, &copiedAllocator->m_currentRemaining);
            stubJit.negPtr(scratchGPR1);
            stubJit.addPtr(MacroAssembler::AbsoluteAddress(&copiedAllocator->m_currentPayloadEnd), scratchGPR1);
            stubJit.addPtr(MacroAssembler::TrustedImm32(sizeof(JSValue)), scratchGPR1);
            // We have scratchGPR1 = new storage, scratchGPR3 = old storage, scratchGPR2 = available
            for (size_t offset = 0; offset < oldSize; offset += sizeof(void*)) {
                stubJit.loadPtr(MacroAssembler::Address(scratchGPR3, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))), scratchGPR2);
                stubJit.storePtr(scratchGPR2, MacroAssembler::Address(scratchGPR1, -static_cast<ptrdiff_t>(offset + sizeof(JSValue) + sizeof(void*))));
            }
        }

        stubJit.storePtr(scratchGPR1, MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()));
        scratchGPR1HasStorage = true;
    }

    ASSERT(oldStructure->typeInfo().type() == structure->typeInfo().type());
    ASSERT(oldStructure->typeInfo().inlineTypeFlags() == structure->typeInfo().inlineTypeFlags());
    ASSERT(oldStructure->indexingType() == structure->indexingType());
#if USE(JSVALUE64)
    uint32_t val = structure->id();
#else
    uint32_t val = reinterpret_cast<uint32_t>(structure->id());
#endif
    stubJit.store32(MacroAssembler::TrustedImm32(val), MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()));
#if USE(JSVALUE64)
    if (isInlineOffset(slot.cachedOffset()))
        stubJit.store64(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue)));
    else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store64(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue)));
    }
#elif USE(JSVALUE32_64)
    if (isInlineOffset(slot.cachedOffset())) {
        stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + offsetInInlineStorage(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    } else {
        if (!scratchGPR1HasStorage)
            stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR1);
        stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
        stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR1, offsetInButterfly(slot.cachedOffset()) * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
    }
#endif

    MacroAssembler::Jump success;
    MacroAssembler::Jump failure;

    if (allocator.didReuseRegisters()) {
        allocator.restoreReusedRegistersByPopping(stubJit);
        success = stubJit.jump();

        failureCases.link(&stubJit);
        allocator.restoreReusedRegistersByPopping(stubJit);
        failure = stubJit.jump();
    } else
        success = stubJit.jump();

    MacroAssembler::Call operationCall;
    MacroAssembler::Jump successInSlowPath;

    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        slowPath.link(&stubJit);

        allocator.restoreReusedRegistersByPopping(stubJit);
        ScratchBuffer* scratchBuffer = vm->scratchBufferForSize(allocator.desiredScratchBufferSizeForCall());
        allocator.preserveUsedRegistersToScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
#if USE(JSVALUE64)
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR);
#else
        stubJit.setupArgumentsWithExecState(baseGPR, MacroAssembler::TrustedImmPtr(structure), MacroAssembler::TrustedImm32(slot.cachedOffset()), valueGPR, valueTagGPR);
#endif
        operationCall = stubJit.call();
        allocator.restoreUsedRegistersFromScratchBufferForCall(stubJit, scratchBuffer, scratchGPR1);
        successInSlowPath = stubJit.jump();
    }

    LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());
    patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    if (allocator.didReuseRegisters())
        patchBuffer.link(failure, failureLabel);
    else
        patchBuffer.link(failureCases, failureLabel);
    if (structure->outOfLineCapacity() != oldStructure->outOfLineCapacity()) {
        patchBuffer.link(operationCall, operationReallocateStorageAndFinishPut);
        patchBuffer.link(successInSlowPath, stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone));
    }

    stubRoutine =
        createJITStubRoutine(
            FINALIZE_CODE_FOR(
                exec->codeBlock(), patchBuffer,
                ("PutById %stransition stub (%p -> %p) for %s, return point %p",
                    structure->outOfLineCapacity() != oldStructure->outOfLineCapacity() ? "reallocating " : "",
                    oldStructure, structure,
                    toCString(*exec->codeBlock()).data(), stubInfo.callReturnLocation.labelAtOffset(
                        stubInfo.patch.deltaCallToDone).executableAddress())),
            *vm,
            exec->codeBlock()->ownerExecutable(),
            structure->outOfLineCapacity() != oldStructure->outOfLineCapacity(),
            structure);
}

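// Try to cache a put. Self puts either get patched in place (replace) or get a
// transition stub (a new property, possibly with storage reallocation); setters and
// custom setters get a call stub and start a polymorphic list right away.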
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;
    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return GiveUpOnCache;

            // Skip optimizing the case where we need a realloc, if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return GiveUpOnCache;

            // Skip optimizing the case where we need realloc, and the structure has
            // indexing storage.
            // FIXME: We shouldn't skip this!  Implement it!
            // https://bugs.webkit.org/show_bug.cgi?id=130914
            if (oldStructure->couldHaveIndexingHeader())
                return GiveUpOnCache;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            emitPutTransitionStub(
                exec, baseValue, ident, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
                stubInfo.stubRoutine);

            RepatchBuffer repatchBuffer(codeBlock);
            repatchBuffer.relink(
                stubInfo.callReturnLocation.jumpAtOffset(
                    stubInfo.patch.deltaCallToJump),
                CodeLocationLabel(stubInfo.stubRoutine->code().code()));
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));

            stubInfo.initPutByIdTransition(*vm, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);

            return RetryCacheLater;
        }

        if (!MacroAssembler::isPtrAlignedAddressOffset(offsetRelativeToPatchedStorage(slot.cachedOffset())))
            return GiveUpOnCache;

        repatchByIdSelfAccess(*vm, codeBlock, stubInfo, structure, ident, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
        stubInfo.initPutByIdReplace(*vm, codeBlock->ownerExecutable(), structure);
        return RetryCacheLater;
    }
    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;

        StructureChain* prototypeChain = 0;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), ident, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;

            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);

        generateByIdStub(
            exec, kindFor(slot), ident, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase),
            stubRoutine);

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
        RELEASE_ASSERT(!list->isFull());
        return RetryCacheLater;
    }

    return GiveUpOnCache;
}

1277void repatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
1278{
1279    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);
1280
1281    if (tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
1282        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
1283}
1284
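// Once the monomorphic cache installed above goes polymorphic, misses land here and
// we accumulate cases into a PolymorphicPutByIdList until it fills up, at which point
// the call is re-pointed at the fully generic put_by_id operation.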
static InlineCacheAction tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;
    JSCell* baseCell = baseValue.asCell();
    Structure* structure = baseCell->structure();
    Structure* oldStructure = structure->previousID();

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    // Optimize self access.
    if (slot.base() == baseValue && slot.isCacheablePut()) {
        PolymorphicPutByIdList* list;
        RefPtr<JITStubRoutine> stubRoutine;

        if (slot.type() == PutPropertySlot::NewProperty) {
            if (structure->isDictionary())
                return GiveUpOnCache;

            // Skip optimizing the case where we need a realloc if we don't have
            // enough registers to make it happen.
            if (GPRInfo::numberOfRegisters < 6
                && oldStructure->outOfLineCapacity() != structure->outOfLineCapacity()
                && oldStructure->outOfLineCapacity())
                return GiveUpOnCache;

            // Skip optimizing the case where we need a realloc and the structure has
            // indexing storage.
            if (oldStructure->couldHaveIndexingHeader())
                return GiveUpOnCache;

            if (normalizePrototypeChain(exec, baseCell) == InvalidPrototypeChain)
                return GiveUpOnCache;

            StructureChain* prototypeChain = structure->prototypeChain(exec);

            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            emitPutTransitionStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, oldStructure, prototypeChain,
                CodeLocationLabel(list->currentSlowPathTarget()),
                stubRoutine);

            list->addAccess(
                PutByIdAccess::transition(
                    *vm, codeBlock->ownerExecutable(),
                    oldStructure, structure, prototypeChain,
                    stubRoutine));
        } else {
            list = PolymorphicPutByIdList::from(putKind, stubInfo);
            if (list->isFull())
                return GiveUpOnCache; // Will get here due to recursion.

            // We're now committed to creating the stub. Mogrify the meta-data accordingly.
            emitPutReplaceStub(
                exec, baseValue, propertyName, slot, stubInfo, putKind,
                structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);

            list->addAccess(
                PutByIdAccess::replace(
                    *vm, codeBlock->ownerExecutable(),
                    structure, stubRoutine));
        }

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));

        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }

    if ((slot.isCacheableCustom() || slot.isCacheableSetter())
        && stubInfo.patch.spillMode == DontSpill) {
        RefPtr<JITStubRoutine> stubRoutine;
        StructureChain* prototypeChain = nullptr;
        PropertyOffset offset = slot.cachedOffset();
        size_t count = 0;
        if (baseValue != slot.base()) {
            count = normalizePrototypeChainForChainAccess(exec, baseCell, slot.base(), propertyName, offset);
            if (count == InvalidPrototypeChain)
                return GiveUpOnCache;

            prototypeChain = structure->prototypeChain(exec);
        }
        PolymorphicPutByIdList* list = PolymorphicPutByIdList::from(putKind, stubInfo);

        generateByIdStub(
            exec, kindFor(slot), propertyName, customFor(slot), stubInfo, prototypeChain, count,
            offset, structure, false, nullptr,
            stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone),
            CodeLocationLabel(list->currentSlowPathTarget()),
            stubRoutine);

        list->addAccess(PutByIdAccess::setter(
            *vm, codeBlock->ownerExecutable(),
            slot.isCacheableSetter() ? PutByIdAccess::Setter : PutByIdAccess::CustomSetter,
            structure, prototypeChain, slot.customSetter(), stubRoutine));

        RepatchBuffer repatchBuffer(codeBlock);
        repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));
        if (list->isFull())
            repatchCall(repatchBuffer, stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));

        return RetryCacheLater;
    }
    return GiveUpOnCache;
}

void buildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}

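// Try to cache an 'in' check. The generated stub validates the structure of the base
// and of every object on the prototype chain, then returns the statically known
// boolean answer; successive stubs accumulate in a PolymorphicAccessStructureList.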
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (Options::forceICFailure())
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    if (wasFound && !slot.isCacheable())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM* vm = &exec->vm();
    Structure* structure = base->structure();

    PropertyOffset offsetIgnored;
    size_t count = normalizePrototypeChainForChainAccess(exec, base, wasFound ? slot.slotBase() : JSValue(), ident, offsetIgnored);
    if (count == InvalidPrototypeChain)
        return GiveUpOnCache;

    PolymorphicAccessStructureList* polymorphicStructureList;
    int listIndex;

    CodeLocationLabel successLabel = stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToDone);
    CodeLocationLabel slowCaseLabel;

    if (stubInfo.accessType == access_unset) {
        polymorphicStructureList = new PolymorphicAccessStructureList();
        stubInfo.initInList(polymorphicStructureList, 0);
        slowCaseLabel = stubInfo.callReturnLocation.labelAtOffset(
            stubInfo.patch.deltaCallToSlowCase);
        listIndex = 0;
    } else {
        RELEASE_ASSERT(stubInfo.accessType == access_in_list);
        polymorphicStructureList = stubInfo.u.inList.structureList;
        listIndex = stubInfo.u.inList.listSize;
        slowCaseLabel = CodeLocationLabel(polymorphicStructureList->list[listIndex - 1].stubRoutine->code().code());

        if (listIndex == POLYMORPHIC_LIST_CACHE_SIZE)
            return GiveUpOnCache;
    }

    StructureChain* chain = structure->prototypeChain(exec);
    RefPtr<JITStubRoutine> stubRoutine;

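    // Generate the stub in its own scope so that the assembler and LinkBuffer are
    // torn down before the routine is installed below.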
    {
        GPRReg baseGPR = static_cast<GPRReg>(stubInfo.patch.baseGPR);
        GPRReg resultGPR = static_cast<GPRReg>(stubInfo.patch.valueGPR);
        GPRReg scratchGPR = TempRegisterSet(stubInfo.patch.usedRegisters).getFreeGPR();

        CCallHelpers stubJit(vm);

        bool needToRestoreScratch;
        if (scratchGPR == InvalidGPRReg) {
            scratchGPR = AssemblyHelpers::selectScratchGPR(baseGPR, resultGPR);
            stubJit.pushToSave(scratchGPR);
            needToRestoreScratch = true;
        } else
            needToRestoreScratch = false;

        MacroAssembler::JumpList failureCases;
        failureCases.append(branchStructure(stubJit,
            MacroAssembler::NotEqual,
            MacroAssembler::Address(baseGPR, JSCell::structureIDOffset()),
            structure));

        CodeBlock* codeBlock = exec->codeBlock();
        if (structure->typeInfo().newImpurePropertyFiresWatchpoints())
            vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));

        if (slot.watchpointSet())
            slot.watchpointSet()->add(stubInfo.addWatchpoint(codeBlock));

        Structure* currStructure = structure;
        WriteBarrier<Structure>* it = chain->head();
        for (unsigned i = 0; i < count; ++i, ++it) {
            JSObject* prototype = asObject(currStructure->prototypeForLookup(exec));
            Structure* protoStructure = prototype->structure();
            addStructureTransitionCheck(
                prototype, protoStructure, exec->codeBlock(), stubInfo, stubJit,
                failureCases, scratchGPR);
            if (protoStructure->typeInfo().newImpurePropertyFiresWatchpoints())
                vm->registerWatchpointForImpureProperty(ident, stubInfo.addWatchpoint(codeBlock));
            currStructure = it->get();
        }

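        // The answer is the same for every object that passes the checks above, so
        // the stub simply materializes it as a boolean constant.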
#if USE(JSVALUE64)
        stubJit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(wasFound))), resultGPR);
#else
        stubJit.move(MacroAssembler::TrustedImm32(wasFound), resultGPR);
#endif

        MacroAssembler::Jump success, fail;

        emitRestoreScratch(stubJit, needToRestoreScratch, scratchGPR, success, fail, failureCases);

        LinkBuffer patchBuffer(*vm, stubJit, exec->codeBlock());

        linkRestoreScratch(patchBuffer, needToRestoreScratch, success, fail, failureCases, successLabel, slowCaseLabel);

        stubRoutine = FINALIZE_CODE_FOR_STUB(
            exec->codeBlock(), patchBuffer,
            ("In (found = %s) stub for %s, return point %p",
                wasFound ? "yes" : "no", toCString(*exec->codeBlock()).data(),
                successLabel.executableAddress()));
    }

    polymorphicStructureList->list[listIndex].set(*vm, codeBlock->ownerExecutable(), stubRoutine, structure, true);
    stubInfo.u.inList.listSize++;

    RepatchBuffer repatchBuffer(codeBlock);
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), CodeLocationLabel(stubRoutine->code().code()));

    return listIndex < (POLYMORPHIC_LIST_CACHE_SIZE - 1) ? RetryCacheLater : GiveUpOnCache;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        repatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationIn);
}

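// Re-point the call site's slow path at the virtual call thunk, which resolves the
// callee dynamically on each invocation.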
static void linkSlowFor(
    RepatchBuffer& repatchBuffer, VM* vm, CallLinkInfo& callLinkInfo,
    CodeSpecializationKind kind, RegisterPreservationMode registers)
{
    repatchBuffer.relink(
        callLinkInfo.callReturnLocation,
        vm->getCTIStub(virtualThunkGeneratorFor(kind, registers)).code());
}

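// Link a call site to a known callee: the hot path jumps directly to the callee's
// entrypoint. For calls, the slow path gets the closure-call linking thunk; for
// constructs, it falls back to the virtual call thunk.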
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    // If you're being call-linked from a DFG caller then you obviously didn't get inlined.
    if (calleeCodeBlock && JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        calleeCodeBlock->m_shouldAlwaysBeInlined = false;

    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.callee.set(exec->callerFrame()->vm(), callLinkInfo.hotPathBegin, callerCodeBlock->ownerExecutable(), callee);
    callLinkInfo.lastSeenCallee.set(exec->callerFrame()->vm(), callerCodeBlock->ownerExecutable(), callee);
    if (shouldShowDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin, " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    repatchBuffer.relink(callLinkInfo.hotPathOther, codePtr);

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (kind == CodeForCall) {
        repatchBuffer.relink(callLinkInfo.callReturnLocation, vm->getCTIStub(linkClosureCallThunkGeneratorFor(registers)).code());
        return;
    }

    ASSERT(kind == CodeForConstruct);
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForConstruct, registers);
}

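// Public entry point: builds a RepatchBuffer for the caller's CodeBlock and re-points
// the slow path at the virtual call thunk.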
void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeSpecializationKind kind,
    RegisterPreservationMode registers)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    RepatchBuffer repatchBuffer(callerCodeBlock);

    linkSlowFor(repatchBuffer, vm, callLinkInfo, kind, registers);
}

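// Called when a call site sees different JSFunction instances that share one
// ExecutableBase. The stub checks structure and executable rather than callee
// identity, installs the callee's scope chain, and then calls the shared entrypoint.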
void linkClosureCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    Structure* structure, ExecutableBase* executable, MacroAssemblerCodePtr codePtr,
    RegisterPreservationMode registers)
{
    ASSERT(!callLinkInfo.stub);

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR);

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    ptrdiff_t offsetToFrame = -sizeof(CallerFrameAndPC);

    if (!ASSERT_DISABLED) {
        CCallHelpers::Jump okArgumentCount = stubJit.branch32(
            CCallHelpers::Below, CCallHelpers::Address(CCallHelpers::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ArgumentCount) + offsetToFrame + PayloadOffset), CCallHelpers::TrustedImm32(10000000));
        stubJit.abortWithReason(RepatchInsaneArgumentCount);
        okArgumentCount.link(&stubJit);
    }

#if USE(JSVALUE64)
    // We can safely clobber everything except the calleeGPR. We can't rely on tagMaskRegister
    // being set. So we do this the hard way.
    GPRReg scratch = AssemblyHelpers::selectScratchGPR(calleeGPR);
    stubJit.move(MacroAssembler::TrustedImm64(TagMask), scratch);
    slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, scratch));
#else
    // We would have already checked that the callee is a cell.
#endif

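    // Fast path: the callee must be a cell with the expected structure and the
    // expected executable; anything else falls through to the virtual call path below.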
    slowPath.append(
        branchStructure(stubJit,
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSCell::structureIDOffset()),
            structure));

    slowPath.append(
        stubJit.branchPtr(
            CCallHelpers::NotEqual,
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            CCallHelpers::TrustedImmPtr(executable)));

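    // The functions covered by this stub can each close over a different scope, so
    // load the scope chain out of the callee and store it into the frame being set up.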
    stubJit.loadPtr(
        CCallHelpers::Address(calleeGPR, JSFunction::offsetOfScopeChain()),
        GPRInfo::returnValueGPR);

#if USE(JSVALUE64)
    stubJit.store64(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + offsetToFrame));
#else
    stubJit.storePtr(
        GPRInfo::returnValueGPR,
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload) + offsetToFrame));
    stubJit.store32(
        CCallHelpers::TrustedImm32(JSValue::CellTag),
        CCallHelpers::Address(MacroAssembler::stackPointerRegister, static_cast<ptrdiff_t>(sizeof(Register) * JSStack::ScopeChain) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag) + offsetToFrame));
#endif

    AssemblyHelpers::Call call = stubJit.nearCall();
    AssemblyHelpers::Jump done = stubJit.jump();

    slowPath.link(&stubJit);
    stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
    stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation.executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock);

    patchBuffer.link(call, FunctionPtr(codePtr.executableAddress()));
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation.labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther.labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(virtualThunkGeneratorFor(CodeForCall, registers)).code()));

    RefPtr<ClosureCallStubRoutine> stubRoutine = adoptRef(new ClosureCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Closure call stub for %s, return point %p, target %p (%s)",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation.labelAtOffset(0).executableAddress(),
                codePtr.executableAddress(), toCString(pointerDump(calleeCodeBlock)).data())),
        *vm, callerCodeBlock->ownerExecutable(), structure, executable, callLinkInfo.codeOrigin));

    RepatchBuffer repatchBuffer(callerCodeBlock);

    repatchBuffer.replaceWithJump(
        RepatchBuffer::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin),
        CodeLocationLabel(stubRoutine->code().code()));
    linkSlowFor(repatchBuffer, vm, callLinkInfo, CodeForCall, registers);

    callLinkInfo.stub = stubRoutine.release();

    ASSERT(!calleeCodeBlock || calleeCodeBlock->isIncomingCallAlreadyLinked(&callLinkInfo));
}

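// The reset functions below revert an inline cache to its unoptimized state, for
// example when the owning CodeBlock's stubs are cleared; the slow-path call is
// pointed back at the Optimize variant so the cache can be rebuilt later.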
void resetGetByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, operationGetByIdOptimize);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabelCompactAtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

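// The put_by_id flavor (strict/non-strict, direct/indirect) is recovered from the
// slow-path call target currently installed, and the matching Optimize variant is
// selected before the rest of the cache is reverted.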
void resetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(repatchBuffer, stubInfo.callReturnLocation).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }
    repatchCall(repatchBuffer, stubInfo.callReturnLocation, optimizedFunction);
    CodeLocationDataLabel32 structureLabel = stubInfo.callReturnLocation.dataLabel32AtOffset(-(intptr_t)stubInfo.patch.deltaCheckImmToCall);
    if (MacroAssembler::canJumpReplacePatchableBranch32WithPatch()) {
        repatchBuffer.revertJumpReplacementToPatchableBranch32WithPatch(
            RepatchBuffer::startOfPatchableBranch32WithPatchOnAddress(structureLabel),
            MacroAssembler::Address(
                static_cast<MacroAssembler::RegisterID>(stubInfo.patch.baseGPR),
                JSCell::structureIDOffset()),
            static_cast<int32_t>(unusedPointer));
    } else
        repatchBuffer.repatch(structureLabel, static_cast<int32_t>(unusedPointer));
#if USE(JSVALUE64)
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToLoadOrStore), 0);
#else
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToTagLoadOrStore), 0);
    repatchBuffer.repatch(stubInfo.callReturnLocation.dataLabel32AtOffset(stubInfo.patch.deltaCallToPayloadLoadOrStore), 0);
#endif
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

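// 'In' caches patch no inline loads, so a reset only needs to re-route the jump back
// to the slow case.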
void resetIn(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
    repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.patch.deltaCallToJump), stubInfo.callReturnLocation.labelAtOffset(stubInfo.patch.deltaCallToSlowCase));
}

} // namespace JSC

#endif // ENABLE(JIT)