/*
 * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "GetByIdStatus.h"

#include "CodeBlock.h"
#include "JSCInlines.h"
#include "JSScope.h"
#include "LLIntData.h"
#include "LowLevelInterpreter.h"
#include "PolymorphicGetByIdList.h"
#include <wtf/ListDump.h>

namespace JSC {

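// Appends a variant unless its structure set overlaps one we already have.
// Overlapping variants would make the status ambiguous, so callers treat a
// false return as a slow-path case.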
bool GetByIdStatus::appendVariant(const GetByIdVariant& variant)
{
    for (unsigned i = 0; i < m_variants.size(); ++i) {
        if (m_variants[i].structureSet().overlaps(variant.structureSet()))
            return false;
    }
    m_variants.append(variant);
    return true;
}

#if ENABLE(DFG_JIT)
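// Checks whether the profiled block has recorded frequent OSR exits at this
// bytecode index for any of the cache-related exit kinds (BadCache,
// BadWeakConstantCache, and their watchpoint variants). If it has, the inline
// cache's profiling has already misled us and should not be trusted again.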
bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
{
    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, jitType))
        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, jitType));
}
#endif

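// Derives a status from the LLInt's inline cache, i.e. the structure that the
// LLInt stored directly in the instruction stream (operand 4 of get_by_id).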
GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
{
    Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;

    if (instruction[0].u.opcode == LLInt::getOpcode(op_get_array_length))
        return GetByIdStatus(NoInformation, false);

    Structure* structure = instruction[4].u.structure.get();
    if (!structure)
        return GetByIdStatus(NoInformation, false);

    if (structure->takesSlowPathInDFGForImpureProperty())
        return GetByIdStatus(NoInformation, false);

    unsigned attributesIgnored;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    if (structure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return GetByIdStatus(NoInformation, false);

    return GetByIdStatus(Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}

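// Tries to append a variant that loads through the given prototype chain.
// Returns false if the chain is no longer valid or the access isn't simple,
// in which case callers treat the whole status as TakesSlowPath.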
bool GetByIdStatus::computeForChain(CodeBlock* profiledBlock, StringImpl* uid, PassRefPtr<IntendedStructureChain> passedChain)
{
#if ENABLE(JIT)
    RefPtr<IntendedStructureChain> chain = passedChain;

    // Validate the chain. If the chain is invalid, then currently the best thing
    // we can do is to assume that TakesSlowPath is true. In the future, it might
    // be worth exploring reifying the structure chain from the structure we've
    // got instead of using the one from the cache, since that will do the right
    // thing if the structure chain has changed. But that may be harder, because
    // we may then end up having a different type of access altogether. And it
    // currently does not appear to be worth it to do so -- effectively, the
    // heuristic we have now is that if the structure chain has changed between
    // when it was cached in the baseline JIT and when the DFG tried to inline
    // the access, then we fall back on a polymorphic access.
    if (!chain->isStillValid())
        return false;

    if (chain->head()->takesSlowPathInDFGForImpureProperty())
        return false;
    size_t chainSize = chain->size();
    for (size_t i = 0; i < chainSize; i++) {
        if (chain->at(i)->takesSlowPathInDFGForImpureProperty())
            return false;
    }

    JSObject* currentObject = chain->terminalPrototype();
    Structure* currentStructure = chain->last();

    ASSERT_UNUSED(currentObject, currentObject);

    unsigned attributesIgnored;
    JSCell* specificValue;

    PropertyOffset offset = currentStructure->getConcurrently(
        *profiledBlock->vm(), uid, attributesIgnored, specificValue);
    if (currentStructure->isDictionary())
        specificValue = 0;
    if (!isValidOffset(offset))
        return false;

    return appendVariant(GetByIdVariant(StructureSet(chain->head()), offset, specificValue, chain));
#else // ENABLE(JIT)
    UNUSED_PARAM(profiledBlock);
    UNUSED_PARAM(uid);
    UNUSED_PARAM(passedChain);
    UNREACHABLE_FOR_PLATFORM();
    return false;
#endif // ENABLE(JIT)
}

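// Computes the status seen by a baseline code block: prefer the JIT's stub
// info, cross-check it against exit sites and slow-case counts, and fall back
// to the LLInt's cache if the JIT never saw this access.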
GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
{
    ConcurrentJITLocker locker(profiledBlock->m_lock);

    GetByIdStatus result;

#if ENABLE(DFG_JIT)
    result = computeForStubInfo(
        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid);

    if (!result.takesSlowPath()
        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
        return GetByIdStatus(TakesSlowPath, true);
#else
    UNUSED_PARAM(map);
#endif

    if (!result)
        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);

    return result;
}

#if ENABLE(JIT)
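// Summarizes a single StructureStubInfo. The unnamed ConcurrentJITLocker
// argument documents that the caller must hold the profiled block's lock.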
GetByIdStatus GetByIdStatus::computeForStubInfo(
    const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
    StringImpl* uid)
{
    if (!stubInfo || !stubInfo->seen)
        return GetByIdStatus(NoInformation);

    if (stubInfo->resetByGC)
        return GetByIdStatus(TakesSlowPath, true);

    PolymorphicGetByIdList* list = 0;
    if (stubInfo->accessType == access_get_by_id_list) {
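        // Pre-scan the list. Any access that makes calls (e.g. a getter)
        // forces MakesCalls; any watched access forces the slow path.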
        list = stubInfo->u.getByIdList.list;
        bool makesCalls = false;
        bool isWatched = false;
        for (unsigned i = 0; i < list->size(); ++i) {
            const GetByIdAccess& access = list->at(i);
            if (access.doesCalls()) {
                makesCalls = true;
                break;
            }
            if (access.isWatched()) {
                isWatched = true;
                continue;
            }
        }
        if (makesCalls)
            return GetByIdStatus(MakesCalls, true);
        if (isWatched)
            return GetByIdStatus(TakesSlowPath, true);
    }

    // Finally figure out if we can derive an access strategy.
    GetByIdStatus result;
    result.m_state = Simple;
    result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
    switch (stubInfo->accessType) {
    case access_unset:
        return GetByIdStatus(NoInformation);

    case access_get_by_id_self: {
        Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
        if (structure->takesSlowPathInDFGForImpureProperty())
            return GetByIdStatus(TakesSlowPath, true);
        unsigned attributesIgnored;
        JSCell* specificValue;
        GetByIdVariant variant;
        variant.m_offset = structure->getConcurrently(
            *profiledBlock->vm(), uid, attributesIgnored, specificValue);
        if (!isValidOffset(variant.m_offset))
            return GetByIdStatus(TakesSlowPath, true);

        if (structure->isDictionary())
            specificValue = 0;

        variant.m_structureSet.add(structure);
        variant.m_specificValue = JSValue(specificValue);
        result.appendVariant(variant);
        return result;
    }

    case access_get_by_id_list: {
        for (unsigned listIndex = 0; listIndex < list->size(); ++listIndex) {
            ASSERT(list->at(listIndex).isSimple());

            Structure* structure = list->at(listIndex).structure();

            // FIXME: We should assert that we never see a structure that
            // hasImpureGetOwnPropertySlot() but for which we don't
            // newImpurePropertyFiresWatchpoints(). We're not at a point where we can do
            // that, yet.
            // https://bugs.webkit.org/show_bug.cgi?id=131810

            if (structure->takesSlowPathInDFGForImpureProperty())
                return GetByIdStatus(TakesSlowPath, true);

            if (list->at(listIndex).chain()) {
                RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
                    profiledBlock, structure, list->at(listIndex).chain(),
                    list->at(listIndex).chainCount()));
                if (!result.computeForChain(profiledBlock, uid, chain))
                    return GetByIdStatus(TakesSlowPath, true);
                continue;
            }

            unsigned attributesIgnored;
            JSCell* specificValue;
            PropertyOffset myOffset = structure->getConcurrently(
                *profiledBlock->vm(), uid, attributesIgnored, specificValue);
            if (structure->isDictionary())
                specificValue = 0;

            if (!isValidOffset(myOffset))
                return GetByIdStatus(TakesSlowPath, true);

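            // Try to fold this structure into an existing chainless variant
            // with the same offset. If the specific values disagree, the
            // merged variant simply drops its specific value.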
            bool found = false;
            for (unsigned variantIndex = 0; variantIndex < result.m_variants.size(); ++variantIndex) {
                GetByIdVariant& variant = result.m_variants[variantIndex];
                if (variant.m_chain)
                    continue;

                if (variant.m_offset != myOffset)
                    continue;

                found = true;
                if (variant.m_structureSet.contains(structure))
                    break;

                if (variant.m_specificValue != JSValue(specificValue))
                    variant.m_specificValue = JSValue();

                variant.m_structureSet.add(structure);
                break;
            }

            if (found)
                continue;

            if (!result.appendVariant(GetByIdVariant(StructureSet(structure), myOffset, specificValue)))
                return GetByIdStatus(TakesSlowPath, true);
        }

        return result;
    }

    case access_get_by_id_chain: {
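        // An indirect chain access may have to call out (e.g. to a getter),
        // so only direct loads can be summarized simply.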
        if (!stubInfo->u.getByIdChain.isDirect)
            return GetByIdStatus(MakesCalls, true);
        RefPtr<IntendedStructureChain> chain = adoptRef(new IntendedStructureChain(
            profiledBlock,
            stubInfo->u.getByIdChain.baseObjectStructure.get(),
            stubInfo->u.getByIdChain.chain.get(),
            stubInfo->u.getByIdChain.count));
        if (result.computeForChain(profiledBlock, uid, chain))
            return result;
        return GetByIdStatus(TakesSlowPath, true);
    }

    default:
        return GetByIdStatus(TakesSlowPath, true);
    }

    RELEASE_ASSERT_NOT_REACHED();
    return GetByIdStatus();
}
#endif // ENABLE(JIT)

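// Computes the status given both a baseline block and (optionally) a DFG
// block: prefer the DFG block's stub info, check the baseline block for FTL
// exit sites, and otherwise fall back to baseline profiling.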
GetByIdStatus GetByIdStatus::computeFor(
    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
{
#if ENABLE(DFG_JIT)
    if (dfgBlock) {
        GetByIdStatus result;
        {
            ConcurrentJITLocker locker(dfgBlock->m_lock);
            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
        }

        if (result.takesSlowPath())
            return result;

        {
            ConcurrentJITLocker locker(profiledBlock->m_lock);
            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
                return GetByIdStatus(TakesSlowPath, true);
        }

        if (result.isSet())
            return result;
    }
#else
    UNUSED_PARAM(dfgBlock);
    UNUSED_PARAM(dfgMap);
#endif

    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
}

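// Computes a status from a structure alone, with no profiling information.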
GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
{
    // For now we only handle the super simple self access case. We could handle the
    // prototype case in the future.

    if (!structure)
        return GetByIdStatus(TakesSlowPath);

    // If the property name parses as an array index, it won't be cached as a
    // named property access, so give up.
    if (toUInt32FromStringImpl(uid) != PropertyName::NotAnIndex)
        return GetByIdStatus(TakesSlowPath);

    if (structure->typeInfo().overridesGetOwnPropertySlot() && structure->typeInfo().type() != GlobalObjectType)
        return GetByIdStatus(TakesSlowPath);

    if (!structure->propertyAccessesAreCacheable())
        return GetByIdStatus(TakesSlowPath);

    unsigned attributes;
    JSCell* specificValue;
    PropertyOffset offset = structure->getConcurrently(vm, uid, attributes, specificValue);
    if (!isValidOffset(offset))
        return GetByIdStatus(TakesSlowPath); // It's probably a prototype lookup. Give up on life for now, even though we could totally be way smarter about it.
    if (attributes & Accessor)
        return GetByIdStatus(MakesCalls);
    if (structure->isDictionary())
        specificValue = 0;
    return GetByIdStatus(
        Simple, false, GetByIdVariant(StructureSet(structure), offset, specificValue));
}


void GetByIdStatus::dump(PrintStream& out) const
{
    out.print("(");
    switch (m_state) {
    case NoInformation:
        out.print("NoInformation");
        break;
    case Simple:
        out.print("Simple");
        break;
    case TakesSlowPath:
        out.print("TakesSlowPath");
        break;
    case MakesCalls:
        out.print("MakesCalls");
        break;
    }
    out.print(", ", listDump(m_variants), ", seenInJIT = ", m_wasSeenInJIT, ")");
}

} // namespace JSC