/*
 * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(JIT)
#if USE(JSVALUE32_64)
#include "JIT.h"

#include "CodeBlock.h"
#include "GCAwareJITStubRoutine.h"
#include "Interpreter.h"
#include "JITInlines.h"
#include "JITStubCall.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "JSPropertyNameIterator.h"
#include "JSVariableObject.h"
#include "LinkBuffer.h"
#include "RepatchBuffer.h"
#include "ResultType.h"
#include "SamplingTool.h"
#include <wtf/StringPrintStream.h>

#ifndef NDEBUG
#include <stdio.h>
#endif

using namespace std;

namespace JSC {

void JIT::emit_op_put_by_index(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_put_by_index);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImm32(property));
    stubCall.addArgument(value);
    stubCall.call();
}

void JIT::emit_op_put_getter_setter(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned getter = currentInstruction[3].u.operand;
    unsigned setter = currentInstruction[4].u.operand;

    JITStubCall stubCall(this, cti_op_put_getter_setter);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.addArgument(getter);
    stubCall.addArgument(setter);
    stubCall.call();
}

void JIT::emit_op_del_by_id(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    JITStubCall stubCall(this, cti_op_del_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&m_codeBlock->identifier(property)));
    stubCall.call(dst);
}

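// Shared stub for get_by_val on a JSString. On entry regT0 holds the string cell
// and regT2 holds the int32 index; on success the resulting single-character
// JSString* is returned in regT0 (with the cell tag in regT1), and on failure 0
// is returned in regT0 so the caller can null check and fall through to its slow path.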
JIT::CodeRef JIT::stringGetByValStubGenerator(VM* vm)
{
    JSInterfaceJIT jit;
    JumpList failures;
    failures.append(jit.branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(vm->stringStructure.get())));

    // Load the string length into regT1, and start the process of loading the data pointer into regT0.
    jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
    jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
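    // A null value pointer means the string is an unresolved rope; bail out to the slow path.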
    failures.append(jit.branchTest32(Zero, regT0));

    // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
    failures.append(jit.branch32(AboveOrEqual, regT2, regT1));

    // Load the character
    JumpList is16Bit;
    JumpList cont8Bit;
    // Load the string flags
    jit.loadPtr(Address(regT0, StringImpl::flagsOffset()), regT1);
    jit.loadPtr(Address(regT0, StringImpl::dataOffset()), regT0);
    is16Bit.append(jit.branchTest32(Zero, regT1, TrustedImm32(StringImpl::flagIs8Bit())));
    jit.load8(BaseIndex(regT0, regT2, TimesOne, 0), regT0);
    cont8Bit.append(jit.jump());
    is16Bit.link(&jit);
    jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);

    cont8Bit.link(&jit);

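    // Only characters below 0x100 are cached as single-character strings; anything else takes the slow path.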
    failures.append(jit.branch32(AboveOrEqual, regT0, TrustedImm32(0x100)));
    jit.move(TrustedImmPtr(vm->smallStrings.singleCharacterStrings()), regT1);
    jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
    jit.move(TrustedImm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
    jit.ret();

    failures.link(&jit);
    jit.move(TrustedImm32(0), regT0);
    jit.ret();

    LinkBuffer patchBuffer(*vm, &jit, GLOBAL_THUNK_ID);
    return FINALIZE_CODE(patchBuffer, ("String get_by_val stub"));
}

void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

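    // Under JSVALUE32_64 each JSValue is a 32-bit tag / 32-bit payload pair: emitLoad2 leaves
    // the base in regT1 (tag) / regT0 (payload) and the property in regT3 (tag) / regT2 (payload).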
    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    emitArrayProfilingSite(regT1, regT3, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32GetByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoubleGetByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousGetByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStorageGetByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

#if !ASSERT_DISABLED
    Jump resultOK = branch32(NotEqual, regT1, TrustedImm32(JSValue::EmptyValueTag));
    breakpoint();
    resultOK.link(this);
#endif

    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

JIT::JumpList JIT::emitContiguousGetByVal(Instruction*, PatchableJump& badType, IndexingType expectedShape)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(expectedShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));

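    // Contiguous storage holds full 8-byte JSValues, hence the TimesEight scaling; the tag and payload halves are loaded separately.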
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    return slowCases;
}

JIT::JumpList JIT::emitDoubleGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(DoubleShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength())));

    loadDouble(BaseIndex(regT3, regT2, TimesEight), fpRegT0);
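    // Double storage represents holes as NaN, so an unordered self-comparison (NaN != NaN) detects an empty slot.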
    slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
    moveDoubleToInts(fpRegT0, regT0, regT1);

    return slowCases;
}

JIT::JumpList JIT::emitArrayStorageGetByVal(Instruction*, PatchableJump& badType)
{
    JumpList slowCases;

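    // Biasing the shape by -ArrayStorageShape lets one unsigned compare accept both ArrayStorageShape and SlowPutArrayStorageShape.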
    add32(TrustedImm32(-ArrayStorageShape), regT1, regT3);
    badType = patchableBranch32(Above, regT3, TrustedImm32(SlowPutArrayStorageShape - ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1); // tag
    load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0); // payload
    slowCases.append(branch32(Equal, regT1, TrustedImm32(JSValue::EmptyValueTag)));

    return slowCases;
}

void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check

    Jump nonCell = jump();
    linkSlowCase(iter); // base array check
    Jump notString = branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(m_vm->stringStructure.get()));
    emitNakedCall(m_vm->getCTIStub(stringGetByValStubGenerator).code());
    Jump failed = branchTestPtr(Zero, regT0);
    emitStore(dst, regT1, regT0);
    emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
    failed.link(this);
    notString.link(this);
    nonCell.link(this);

    Jump skipProfiling = jump();

    linkSlowCase(iter); // vector length check
    linkSlowCase(iter); // empty value

    emitArrayProfileOutOfBoundsSpecialCase(profile);

    skipProfiling.link(this);

    Label slowPath = label();

    JITStubCall stubCall(this, cti_op_get_by_val);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    Call call = stubCall.call(dst);

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;

    emitValueProfilingSite();
}

void JIT::emit_op_put_by_val(Instruction* currentInstruction)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    emitLoad2(base, regT1, regT0, property, regT3, regT2);

    addSlowCase(branch32(NotEqual, regT3, TrustedImm32(JSValue::Int32Tag)));
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    loadPtr(Address(regT0, JSCell::structureOffset()), regT1);
    emitArrayProfilingSite(regT1, regT3, profile);
    and32(TrustedImm32(IndexingShapeMask), regT1);

    PatchableJump badType;
    JumpList slowCases;

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
        slowCases = emitInt32PutByVal(currentInstruction, badType);
        break;
    case JITDouble:
        slowCases = emitDoublePutByVal(currentInstruction, badType);
        break;
    case JITContiguous:
        slowCases = emitContiguousPutByVal(currentInstruction, badType);
        break;
    case JITArrayStorage:
        slowCases = emitArrayStoragePutByVal(currentInstruction, badType);
        break;
    default:
        CRASH();
        break;
    }

    addSlowCase(badType);
    addSlowCase(slowCases);

    Label done = label();

    m_byValCompilationInfo.append(ByValCompilationInfo(m_bytecodeOffset, badType, mode, done));
}

JIT::JumpList JIT::emitGenericContiguousPutByVal(Instruction* currentInstruction, PatchableJump& badType, IndexingType indexingShape)
{
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    // Check against the caller-supplied shape (Int32Shape, DoubleShape or ContiguousShape), not just ContiguousShape.
    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(indexingShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    Jump outOfBounds = branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfPublicLength()));

    Label storeResult = label();
    emitLoad(value, regT1, regT0);
    switch (indexingShape) {
    case Int32Shape:
        slowCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag)));
        // Fall through.
    case ContiguousShape:
        store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
        store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
        break;
    case DoubleShape: {
        Jump notInt = branch32(NotEqual, regT1, TrustedImm32(JSValue::Int32Tag));
        convertInt32ToDouble(regT0, fpRegT0);
        Jump ready = jump();
        notInt.link(this);
        moveIntsToDouble(regT0, regT1, fpRegT0, fpRegT1);
        slowCases.append(branchDouble(DoubleNotEqualOrUnordered, fpRegT0, fpRegT0));
        ready.link(this);
        storeDouble(fpRegT0, BaseIndex(regT3, regT2, TimesEight));
        break;
    }
    default:
        CRASH();
        break;
    }

    Jump done = jump();

    outOfBounds.link(this);
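    // The index is past the public length but may still be within the allocated vector;
    // if so, profile it as a store to a hole, grow the public length, and retry the
    // store. Otherwise fall through to the slow case.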
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, Butterfly::offsetOfVectorLength())));

    emitArrayProfileStoreToHoleSpecialCase(profile);

    add32(TrustedImm32(1), regT2, regT1);
    store32(regT1, Address(regT3, Butterfly::offsetOfPublicLength()));
    jump().linkTo(storeResult, this);

    done.link(this);

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    return slowCases;
}

JIT::JumpList JIT::emitArrayStoragePutByVal(Instruction* currentInstruction, PatchableJump& badType)
{
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    JumpList slowCases;

    badType = patchableBranch32(NotEqual, regT1, TrustedImm32(ArrayStorageShape));

    loadPtr(Address(regT0, JSObject::butterflyOffset()), regT3);
    slowCases.append(branch32(AboveOrEqual, regT2, Address(regT3, ArrayStorage::vectorLengthOffset())));

    Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), TrustedImm32(JSValue::EmptyValueTag));

    Label storeResult(this);
    emitLoad(value, regT1, regT0);
    store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload))); // payload
    store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag))); // tag
    Jump end = jump();

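    // Storing into a hole: bump m_numValuesInVector, and if the index is at or beyond
    // the array length, grow the length as well, then retry the store.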
    empty.link(this);
    emitArrayProfileStoreToHoleSpecialCase(profile);
    add32(TrustedImm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
    branch32(Below, regT2, Address(regT3, ArrayStorage::lengthOffset())).linkTo(storeResult, this);

    add32(TrustedImm32(1), regT2, regT0);
    store32(regT0, Address(regT3, ArrayStorage::lengthOffset()));
    jump().linkTo(storeResult, this);

    end.link(this);

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    return slowCases;
}

void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned base = currentInstruction[1].u.operand;
    unsigned property = currentInstruction[2].u.operand;
    unsigned value = currentInstruction[3].u.operand;
    ArrayProfile* profile = currentInstruction[4].u.arrayProfile;

    linkSlowCase(iter); // property int32 check
    linkSlowCaseIfNotJSCell(iter, base); // base cell check
    linkSlowCase(iter); // base not array check

    JITArrayMode mode = chooseArrayMode(profile);
    switch (mode) {
    case JITInt32:
    case JITDouble:
        linkSlowCase(iter); // value type check
        break;
    default:
        break;
    }

    Jump skipProfiling = jump();
    linkSlowCase(iter); // out of bounds
    emitArrayProfileOutOfBoundsSpecialCase(profile);
    skipProfiling.link(this);

    Label slowPath = label();

    JITStubCall stubPutByValCall(this, cti_op_put_by_val);
    stubPutByValCall.addArgument(base);
    stubPutByValCall.addArgument(property);
    stubPutByValCall.addArgument(value);
    Call call = stubPutByValCall.call();

    m_byValCompilationInfo[m_byValInstructionIndex].slowPathTarget = slowPath;
    m_byValCompilationInfo[m_byValInstructionIndex].returnAddress = call;
    m_byValInstructionIndex++;
}

void JIT::emit_op_get_by_id(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    Identifier* ident = &(m_codeBlock->identifier(currentInstruction[3].u.operand));

    emitLoad(base, regT1, regT0);
    emitJumpSlowCaseIfNotJSCell(base, regT1);
    compileGetByIdHotPath(ident);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
}

void JIT::compileGetByIdHotPath(Identifier* ident)
{
    // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
    // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
    // to array-length / prototype access trampolines), and we also record the property-map access offset as a label
    // to jump back to if one of these trampolines finds a match.

    if (*ident == m_vm->propertyNames->length && shouldEmitProfiling()) {
        loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
        emitArrayProfilingSiteForBytecodeIndex(regT2, regT3, m_bytecodeOffset);
    }

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    Label hotPathBegin(this);

    DataLabelPtr structureToCompare;
    PatchableJump structureCheck = patchableBranchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
    addSlowCase(structureCheck);

    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT2);
    DataLabelCompact displacementLabel1 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
    DataLabelCompact displacementLabel2 = loadPtrWithCompactAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag

    Label putResult(this);

    END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubGetById, m_bytecodeOffset, hotPathBegin, structureToCompare, structureCheck, propertyStorageLoad, displacementLabel1, displacementLabel2, putResult));
}

void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int dst = currentInstruction[1].u.operand;
    int base = currentInstruction[2].u.operand;
    int ident = currentInstruction[3].u.operand;

    compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
    emitValueProfilingSite();
}

void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter)
{
    // As for the hot path of get_by_id, above, we ensure that we can use an architecture-specific offset
    // so that we only need to track one pointer into the slow case code - we track a pointer to the location
    // of the call (which we can use to look up the patch information), but should an array-length or
    // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
    // the distance from the call to the head of the slow case.
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);

    Label coldPathBegin(this);
    JITStubCall stubCall(this, cti_op_get_by_id);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImmPtr(ident));
    Call call = stubCall.call(dst);

    END_UNINTERRUPTED_SEQUENCE_FOR_PUT(sequenceGetByIdSlowCase, dst);

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubGetById, coldPathBegin, call);
}

void JIT::emit_op_put_by_id(Instruction* currentInstruction)
{
    // In order to be able to patch both the Structure and the object offset, we store one pointer
    // ('hotPathBegin') to just after the point where the arguments have been loaded into registers,
    // and we generate code such that the Structure and offset are always at the same distance from it.

    int base = currentInstruction[1].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad2(base, regT1, regT0, value, regT3, regT2);

    emitJumpSlowCaseIfNotJSCell(base, regT1);

    BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);

    Label hotPathBegin(this);

    // It is important that the following instruction plants a 32-bit immediate, in order that it can be patched over.
    DataLabelPtr structureToCompare;
    addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, JSCell::structureOffset()), structureToCompare, TrustedImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));

    ConvertibleLoadLabel propertyStorageLoad = convertibleLoadPtr(Address(regT0, JSObject::butterflyOffset()), regT1);
    DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT1, patchPutByIdDefaultOffset)); // payload
    DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT1, patchPutByIdDefaultOffset)); // tag

    END_UNINTERRUPTED_SEQUENCE(sequencePutById);

    emitWriteBarrier(regT0, regT2, regT1, regT2, ShouldFilterImmediates, WriteBarrierForPropertyAccess);

    m_propertyAccessCompilationInfo.append(PropertyStubCompilationInfo(PropertyStubPutById, m_bytecodeOffset, hotPathBegin, structureToCompare, propertyStorageLoad, displacementLabel1, displacementLabel2));
}

void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    int base = currentInstruction[1].u.operand;
    int ident = currentInstruction[2].u.operand;
    int direct = currentInstruction[8].u.operand;

    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);

    JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
    stubCall.addArgument(base);
    stubCall.addArgument(TrustedImmPtr(&(m_codeBlock->identifier(ident))));
    stubCall.addArgument(regT3, regT2);
    Call call = stubCall.call();

    // Track the location of the call; this will be used to recover patch information.
    m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex++].slowCaseInfo(PropertyStubPutById, call);
}

// Compile a store into an object's property storage.  May overwrite base.
void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, PropertyOffset cachedOffset)
{
    if (isOutOfLineOffset(cachedOffset))
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
    emitStore(indexRelativeToBase(cachedOffset), valueTag, valuePayload, base);
}

// Compile a load from an object's property storage.  May overwrite base.
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, base);
        return;
    }

    RegisterID temp = resultPayload;
    loadPtr(Address(base, JSObject::butterflyOffset()), temp);
    emitLoad(indexRelativeToBase(cachedOffset), resultTag, resultPayload, temp);
}

void JIT::compileGetDirectOffset(JSObject* base, RegisterID resultTag, RegisterID resultPayload, PropertyOffset cachedOffset)
{
    if (isInlineOffset(cachedOffset)) {
        move(TrustedImmPtr(base->locationForOffset(cachedOffset)), resultTag);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
        load32(Address(resultTag, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
        return;
    }

    loadPtr(base->butterflyAddress(), resultTag);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
    load32(Address(resultTag, offsetInButterfly(cachedOffset) * sizeof(WriteBarrier<Unknown>) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
}

void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, PropertyOffset cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
    // The code below assumes that regT0 contains the basePayload and regT1 contains the baseTag. Restore them from the stack.
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM, the stack offset is not adjusted by sizeof(void*).
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif

    JumpList failureCases;
    failureCases.append(branch32(NotEqual, regT1, TrustedImm32(JSValue::CellTag)));
    failureCases.append(branchPtr(NotEqual, Address(regT0, JSCell::structureOffset()), TrustedImmPtr(oldStructure)));
    testPrototype(oldStructure->storedPrototype(), failureCases, stubInfo);

    if (!direct) {
        // Verify that nothing in the prototype chain has a setter for this property.
        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it)
            testPrototype((*it)->storedPrototype(), failureCases, stubInfo);
    }

    // If we succeed in all of our checks, and the code was optimizable, then make sure we
    // decrement the rare case counter.
#if ENABLE(VALUE_PROFILER)
    if (m_codeBlock->canCompileWithDFG() >= DFG::MayInline) {
        sub32(
            TrustedImm32(1),
            AbsoluteAddress(&m_codeBlock->rareCaseProfileForBytecodeOffset(stubInfo->bytecodeIndex)->m_counter));
    }
#endif

    // Reallocate property storage if needed.
    Call callTarget;
    bool willNeedStorageRealloc = oldStructure->outOfLineCapacity() != newStructure->outOfLineCapacity();
    if (willNeedStorageRealloc) {
        // This trampoline was called like a JIT stub; before we can call again we need to
        // remove the return address from the stack, to prevent the stack from becoming misaligned.
        preserveReturnAddressAfterCall(regT3);

        JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
        stubCall.skipArgument(); // base
        stubCall.skipArgument(); // ident
        stubCall.skipArgument(); // value
        stubCall.addArgument(TrustedImm32(oldStructure->outOfLineCapacity()));
        stubCall.addArgument(TrustedImmPtr(newStructure));
        stubCall.call(regT0);

        restoreReturnAddressBeforeReturn(regT3);

#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
        // On MIPS, SH4 and ARM, the stack offset is not adjusted by sizeof(void*).
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#else
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT0);
        load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[0]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT1);
#endif
    }

    emitWriteBarrier(regT0, regT1, regT1, regT3, UnconditionalWriteBarrier, WriteBarrierForPropertyAccess);

    storePtr(TrustedImmPtr(newStructure), Address(regT0, JSCell::structureOffset()));
#if CPU(MIPS) || CPU(SH4) || CPU(ARM)
    // On MIPS, SH4 and ARM, the stack offset is not adjusted by sizeof(void*).
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#else
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT3);
    load32(Address(stackPointerRegister, OBJECT_OFFSETOF(JITStackFrame, args[2]) + sizeof(void*) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT2);
#endif
    compilePutDirectOffset(regT0, regT2, regT3, cachedOffset);

    ret();

    ASSERT(!failureCases.empty());
    failureCases.link(this);
    restoreArgumentReferenceForTrampoline();
    Call failureCall = tailRecursiveCall();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));

    if (willNeedStorageRealloc) {
        ASSERT(m_calls.size() == 1);
        patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
    }

    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline put_by_id transition stub for %s, return point %p",
                toCString(*m_codeBlock).data(), returnAddress.value())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        willNeedStorageRealloc,
        newStructure);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relinkCallerToTrampoline(returnAddress, CodeLocationLabel(stubInfo->stubRoutine->code().code()));
}

void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in the future, go to cti_op_get_by_id_generic.
    // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.get.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
    RepatchBuffer repatchBuffer(codeBlock);

    // We don't want to patch more than once - in the future, go to cti_op_put_by_id_generic.
    // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));

    // Patch the offset into the property map to load from, then patch the Structure to look for.
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), structure);
    repatchBuffer.setLoadInstructionIsActive(stubInfo->hotPathBegin.convertibleLoadAtOffset(stubInfo->patch.baseline.u.put.propertyStorageLoad), isOutOfLineOffset(cachedOffset));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)); // payload
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), offsetRelativeToPatchedStorage(cachedOffset) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)); // tag
}

void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
{
    StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);

    // regT0 holds a JSCell*

    // Check for array: load the indexing type byte out of the Structure before testing its flags.
    loadPtr(Address(regT0, JSCell::structureOffset()), regT2);
    load8(Address(regT2, Structure::indexingTypeOffset()), regT2);
    Jump failureCases1 = branchTest32(Zero, regT2, TrustedImm32(IsArray));
    Jump failureCases2 = branchTest32(Zero, regT2, TrustedImm32(IndexingShapeMask));

    // Checks out okay! - get the length from the storage
    loadPtr(Address(regT0, JSArray::butterflyOffset()), regT2);
    load32(Address(regT2, ArrayStorage::lengthOffset()), regT2);

    Jump failureCases3 = branch32(Above, regT2, TrustedImm32(INT_MAX));
    move(regT2, regT0);
    move(TrustedImm32(JSValue::Int32Tag), regT1);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    patchBuffer.link(failureCases2, slowCaseBegin);
    patchBuffer.link(failureCases3, slowCaseBegin);

    // On success, return to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = FINALIZE_CODE_FOR_STUB(
        patchBuffer,
        ("Baseline get_by_id array length stub for %s, return point %p",
            toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                stubInfo->patch.baseline.u.get.putResult).executableAddress()));

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));

    // We don't want to patch more than once - in the future, go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}

void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - so let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);

    bool needsStubLink = false;
    // Checks out okay!
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);

    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);
    patchBuffer.link(failureCases1, slowCaseBegin);
    if (failureCases2.isSet())
        patchBuffer.link(failureCases2, slowCaseBegin);

    // On success, return to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }

    // Track the stub we have created so that it will be deleted later.
    stubInfo->stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id proto stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubInfo->stubRoutine->code().code()));

    // We don't want to patch more than once - in the future, go to cti_op_get_by_id_generic.
    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}


void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset)
{
    // regT0 holds a JSCell*
    Jump failureCase = checkStructure(regT0, structure);
    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(regT0, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(regT0, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(polymorphicStructures->list[currentIndex - 1].stubRoutine));
    if (!lastProtoBegin)
        lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin);

    patchBuffer.link(failureCase, lastProtoBegin);

    // On success, return to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id self list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    polymorphicStructures->list[currentIndex].set(*m_vm, m_codeBlock->ownerExecutable(), stubRoutine, structure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*

    // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
    // referencing the prototype object - so let's speculatively load its table nice and early!)
    JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));

    // Check that regT0 is an object of the right Structure.
    Jump failureCases1 = checkStructure(regT0, structure);

    // Check that the prototype object's Structure has not changed.
    Jump failureCases2 = addStructureTransitionCheck(protoObject, prototypeStructure, stubInfo, regT3);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));
    patchBuffer.link(failureCases1, lastProtoBegin);
    if (failureCases2.isSet())
        patchBuffer.link(failureCases2, lastProtoBegin);

    // On success, return to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id proto list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, prototypeStructure, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail, stubInfo);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    bool isDirect = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else {
        isDirect = true;
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    }

    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    CodeLocationLabel lastProtoBegin = CodeLocationLabel(JITStubRoutine::asCodePtr(prototypeStructures->list[currentIndex - 1].stubRoutine));

    patchBuffer.link(bucketsOfFail, lastProtoBegin);

    // On success, return to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id chain list stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);

    // Track the stub we have created so that it will be deleted later.
    prototypeStructures->list[currentIndex].set(callFrame->vm(), m_codeBlock->ownerExecutable(), stubRoutine, structure, chain, isDirect);

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));
}

void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, PropertyOffset cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
    // regT0 holds a JSCell*
    ASSERT(count);

    JumpList bucketsOfFail;

    // Check that regT0 is an object of the right Structure.
    bucketsOfFail.append(checkStructure(regT0, structure));

    Structure* currStructure = structure;
    WriteBarrier<Structure>* it = chain->head();
    JSObject* protoObject = 0;
    for (unsigned i = 0; i < count; ++i, ++it) {
        protoObject = asObject(currStructure->prototypeForLookup(callFrame));
        currStructure = it->get();
        testPrototype(protoObject, bucketsOfFail, stubInfo);
    }
    ASSERT(protoObject);

    bool needsStubLink = false;
    if (slot.cachedPropertyType() == PropertySlot::Getter) {
        needsStubLink = true;
        compileGetDirectOffset(protoObject, regT2, regT1, cachedOffset);
        JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
        stubCall.addArgument(regT1);
        stubCall.addArgument(regT0);
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
        needsStubLink = true;
        JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
        stubCall.addArgument(TrustedImmPtr(protoObject));
        stubCall.addArgument(TrustedImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
        stubCall.addArgument(TrustedImmPtr(const_cast<Identifier*>(&ident)));
        stubCall.addArgument(TrustedImmPtr(stubInfo->callReturnLocation.executableAddress()));
        stubCall.call();
    } else
        compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
    Jump success = jump();

    LinkBuffer patchBuffer(*m_vm, this, m_codeBlock);
    if (needsStubLink) {
        for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
            if (iter->to)
                patchBuffer.link(iter->from, FunctionPtr(iter->to));
        }
    }
    // Use the patch information to link the failure cases back to the original slow case routine.
    patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));

    // On success, return to the hot path code, at a point where it will perform the store to dest for us.
    patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(stubInfo->patch.baseline.u.get.putResult));

    // Track the stub we have created so that it will be deleted later.
    RefPtr<JITStubRoutine> stubRoutine = createJITStubRoutine(
        FINALIZE_CODE(
            patchBuffer,
            ("Baseline get_by_id chain stub for %s, return point %p",
                toCString(*m_codeBlock).data(), stubInfo->hotPathBegin.labelAtOffset(
                    stubInfo->patch.baseline.u.get.putResult).executableAddress())),
        *m_vm,
        m_codeBlock->ownerExecutable(),
        needsStubLink);
    stubInfo->stubRoutine = stubRoutine;

    // Finally patch the jump to slow case back in the hot path to jump here instead.
    CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck);
    RepatchBuffer repatchBuffer(m_codeBlock);
    repatchBuffer.relink(jumpLocation, CodeLocationLabel(stubRoutine->code().code()));

1168    // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
1169    repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
1170}
1171
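// Loads the JSValue at a dynamic PropertyOffset into (resultTag, resultPayload);
// on 32-bit a value is a (tag, payload) pair, hence the two 32-bit loads below.
// Inline slots live in the object itself, out-of-line slots in the butterfly;
// the offset register is negated for the out-of-line case and both bases are
// adjusted by a shared constant so one base/index addressing mode serves both.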
void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID offset, FinalObjectMode finalObjectMode)
{
    ASSERT(sizeof(JSValue) == 8);

    if (finalObjectMode == MayBeFinal) {
        Jump isInline = branch32(LessThan, offset, TrustedImm32(firstOutOfLineOffset));
        // Out-of-line: the slot lives in the butterfly, indexed negatively.
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
        Jump done = jump();
        isInline.link(this);
        // Inline: rebase so the shared loads below resolve into inline storage.
        addPtr(TrustedImmPtr(JSObject::offsetOfInlineStorage() - (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), base);
        done.link(this);
    } else {
#if !ASSERT_DISABLED
        // In this mode the offset must be out-of-line; trap if it is not.
        Jump isOutOfLine = branch32(GreaterThanOrEqual, offset, TrustedImm32(firstOutOfLineOffset));
        breakpoint();
        isOutOfLine.link(this);
#endif
        loadPtr(Address(base, JSObject::butterflyOffset()), base);
        neg32(offset);
    }
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultPayload);
    load32(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag) + (firstOutOfLineOffset - 2) * sizeof(EncodedJSValue)), resultTag);
}

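// get_by_pname is the fast path for property reads inside for-in loops: if the
// property register still holds the string produced by the iterator, and the
// base still has the iterator's cached Structure, the value can be loaded by
// index without a hash lookup.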
void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;
    unsigned expected = currentInstruction[4].u.operand;
    unsigned iter = currentInstruction[5].u.operand;
    unsigned i = currentInstruction[6].u.operand;

    emitLoad2(property, regT1, regT0, base, regT3, regT2);
    emitJumpSlowCaseIfNotJSCell(property, regT1);
    addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
    // Property registers are now available as the property is known
    emitJumpSlowCaseIfNotJSCell(base, regT3);
    emitLoadPayload(iter, regT1);

    // Test base's structure
    loadPtr(Address(regT2, JSCell::structureOffset()), regT0);
    addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
    load32(addressFor(i), regT3);
    sub32(TrustedImm32(1), regT3);
    addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
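    // Slots at or beyond the inline capacity live out-of-line: rebase the index
    // into the out-of-line PropertyOffset range expected by compileGetDirectOffset.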
    Jump inlineProperty = branch32(Below, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)));
    add32(TrustedImm32(firstOutOfLineOffset), regT3);
    sub32(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructureInlineCapacity)), regT3);
    inlineProperty.link(this);
    compileGetDirectOffset(regT2, regT1, regT0, regT3);

    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
}

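// Each linkSlowCase below corresponds, in order, to a slow-case check emitted
// in emit_op_get_by_pname; all of them fall back to the generic get_by_val stub.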
void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    unsigned dst = currentInstruction[1].u.operand;
    unsigned base = currentInstruction[2].u.operand;
    unsigned property = currentInstruction[3].u.operand;

    linkSlowCaseIfNotJSCell(iter, property);
    linkSlowCase(iter);
    linkSlowCaseIfNotJSCell(iter, base);
    linkSlowCase(iter);
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_get_by_val_generic);
    stubCall.addArgument(base);
    stubCall.addArgument(property);
    stubCall.call(dst);
}

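// get_scoped_var walks `skip` links up the scope chain and reads a register from
// the variable object found there; roughly:
//     JSScope* scope = callFrame->scopeChain();
//     while (skip--)
//         scope = scope->next();
//     result = static_cast<JSVariableObject*>(scope)->registers()[index];
// When the function's activation may not have been created yet, the first hop
// is skipped if the activation register still holds the empty value.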
void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
{
    int dst = currentInstruction[1].u.operand;
    int index = currentInstruction[2].u.operand;
    int skip = currentInstruction[3].u.operand;

    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        // The bytecode assumes the activation is on the scope chain, but it may
        // not have been created yet; if not, skip this hop.
        Jump activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT2);

    emitLoad(index, regT1, regT0, regT2);
    emitValueProfilingSite();
    emitStore(dst, regT1, regT0);
    map(m_bytecodeOffset + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
}

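// put_scoped_var is the store counterpart: walk `skip` links up the scope chain,
// store the (tag, payload) pair into the variable object's registers, then emit
// a write barrier on the variable object.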
void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
{
    int index = currentInstruction[1].u.operand;
    int skip = currentInstruction[2].u.operand;
    int value = currentInstruction[3].u.operand;

    emitLoad(value, regT1, regT0);

    emitGetFromCallFrameHeaderPtr(JSStack::ScopeChain, regT2);
    bool checkTopLevel = m_codeBlock->codeType() == FunctionCode && m_codeBlock->needsFullScopeChain();
    ASSERT(skip || !checkTopLevel);
    if (checkTopLevel && skip--) {
        // As in get_scoped_var: skip the hop if the activation was never created.
        Jump activationNotCreated = branch32(Equal, tagFor(m_codeBlock->activationRegister()), TrustedImm32(JSValue::EmptyValueTag));
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);
        activationNotCreated.link(this);
    }
    while (skip--)
        loadPtr(Address(regT2, JSScope::offsetOfNext()), regT2);

    loadPtr(Address(regT2, JSVariableObject::offsetOfRegisters()), regT3);
    emitStore(index, regT1, regT0, regT3);
    emitWriteBarrier(regT2, regT1, regT0, regT1, ShouldFilterImmediates, WriteBarrierForVariableAccess);
}

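// init_global_const writes a known global variable through a direct pointer to
// its register; a write barrier on the global object is emitted only when the
// heap has write barriers enabled.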
void JIT::emit_op_init_global_const(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    map(m_bytecodeOffset + OPCODE_LENGTH(op_init_global_const), value, regT1, regT0);
}

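// Same as init_global_const, except the store is guarded by a predicate byte:
// if it has been set, take the slow path so the stub can perform the store and
// any associated bookkeeping.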
void JIT::emit_op_init_global_const_check(Instruction* currentInstruction)
{
    WriteBarrier<Unknown>* registerPointer = currentInstruction[1].u.registerPointer;
    int value = currentInstruction[2].u.operand;

    JSGlobalObject* globalObject = m_codeBlock->globalObject();

    emitLoad(value, regT1, regT0);

    addSlowCase(branchTest8(NonZero, AbsoluteAddress(currentInstruction[3].u.predicatePointer)));

    if (Heap::isWriteBarrierEnabled()) {
        move(TrustedImmPtr(globalObject), regT2);
        emitWriteBarrier(globalObject, regT1, regT3, ShouldFilterImmediates, WriteBarrierForVariableAccess);
    }

    store32(regT1, registerPointer->tagPointer());
    store32(regT0, registerPointer->payloadPointer());
    unmap();
}

void JIT::emitSlow_op_init_global_const_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkSlowCase(iter);

    JITStubCall stubCall(this, cti_op_init_global_const_check);
    stubCall.addArgument(regT1, regT0);
    stubCall.addArgument(TrustedImm32(currentInstruction[4].u.operand));
    stubCall.call();
}

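// The reset routines return a patched get_by_id/put_by_id inline cache to its
// pristine state: the structure check and displacement labels in the hot path
// are cleared, and the slow-path call is pointed back at the unoptimized stub.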
void JIT::resetPatchGetById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_get_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.get.structureToCompare), reinterpret_cast<void*>(unusedPointer));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelCompactAtOffset(stubInfo->patch.baseline.u.get.displacementLabel2), 0);
    repatchBuffer.relink(stubInfo->hotPathBegin.jumpAtOffset(stubInfo->patch.baseline.u.get.structureCheck), stubInfo->callReturnLocation.labelAtOffset(-stubInfo->patch.baseline.u.get.coldPathBegin));
}

void JIT::resetPatchPutById(RepatchBuffer& repatchBuffer, StructureStubInfo* stubInfo)
{
    if (isDirectPutById(stubInfo))
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id_direct);
    else
        repatchBuffer.relink(stubInfo->callReturnLocation, cti_op_put_by_id);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(stubInfo->patch.baseline.u.put.structureToCompare), reinterpret_cast<void*>(unusedPointer));
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel1), 0);
    repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(stubInfo->patch.baseline.u.put.displacementLabel2), 0);
}

} // namespace JSC

#endif // USE(JSVALUE32_64)
#endif // ENABLE(JIT)