1/* 2 * Copyright (C) 2012, 2013 Apple Inc. All rights reserved. 3 * 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions 6 * are met: 7 * 1. Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * 2. Redistributions in binary form must reproduce the above copyright 10 * notice, this list of conditions and the following disclaimer in the 11 * documentation and/or other materials provided with the distribution. 12 * 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#include "config.h"
#include "DFGVariableEventStream.h"

#if ENABLE(DFG_JIT)

#include "CodeBlock.h"
#include "DFGValueSource.h"
#include "Operations.h"
#include <wtf/DataLog.h>
#include <wtf/HashMap.h>

namespace JSC { namespace DFG {

// Dumps one event to the WTF data log, prefixed with the sequence number it
// will occupy in this stream (i.e. the current stream size). Debug aid for
// tracing event-stream generation.
void VariableEventStream::logEvent(const VariableEvent& event)
{
    dataLogF("seq#%u:", static_cast<unsigned>(size()));
    event.dump(WTF::dataFile());
    dataLogF(" ");
}

namespace {

// A lightweight stand-in for the DFG's per-node GenerationInfo, rebuilt by
// replaying variable events. Records where a node's value currently lives
// (register(s) vs. spill slot) and in what data format. format == DataFormatNone
// means the value is dead or was never born.
struct MinifiedGenerationInfo {
    bool filled; // true -> in gpr/fpr/pair, false -> spilled
    VariableRepresentation u;
    DataFormat format;
    
    MinifiedGenerationInfo()
        : format(DataFormatNone)
    {
    }
    
    // Applies one event to this info. Fill/Spill events update the location
    // and representation; Death clears the format (marking the value
    // unavailable) without touching the stale location fields.
    void update(const VariableEvent& event)
    {
        switch (event.kind()) {
        case BirthToFill:
        case Fill:
            filled = true;
            break;
        case BirthToSpill:
        case Spill:
            filled = false;
            break;
        case Death:
            format = DataFormatNone;
            return;
        default:
            // Events that don't concern value location (e.g. hints) are ignored.
            return;
        }
        
        u = event.variableRepresentation();
        format = event.dataFormat();
    }
};

} // namespace

// Attempts to express `node`'s value as a constant recovery, writing the result
// into `recovery`. Handles three cases visible here: a constant-pool entry
// (constant number offset from FirstConstantRegisterIndex), a weak constant,
// and PhantomArguments (arguments object that was optimized away). Returns
// false if `node` is null or not constant-like, leaving `recovery` untouched.
bool VariableEventStream::tryToSetConstantRecovery(ValueRecovery& recovery, CodeBlock* codeBlock, MinifiedNode* node) const
{
    if (!node)
        return false;
    
    if (node->hasConstantNumber()) {
        recovery = ValueRecovery::constant(
            codeBlock->constantRegister(
                FirstConstantRegisterIndex + node->constantNumber()).get());
        return true;
    }
    
    if (node->hasWeakConstant()) {
        recovery = ValueRecovery::constant(node->weakConstant());
        return true;
    }
    
    if (node->op() == PhantomArguments) {
        recovery = ValueRecovery::argumentsThatWereNotCreated();
        return true;
    }
    
    return false;
}

// Reconstructs, for the OSR exit at event-stream position `index`, a
// ValueRecovery for every argument and local of the baseline code block.
// It does this by replaying the event stream from the last Reset checkpoint
// up to `index`, rebuilding a mock-up of the DFG's register-allocation state,
// then translating each operand's ValueSource into a recovery.
void VariableEventStream::reconstruct(
    CodeBlock* codeBlock, CodeOrigin codeOrigin, MinifiedGraph& graph,
    unsigned index, Operands<ValueRecovery>& valueRecoveries) const
{
    ASSERT(codeBlock->getJITType() == JITCode::DFGJIT);
    CodeBlock* baselineCodeBlock = codeBlock->baselineVersion();
    
    // Number of locals to recover: for an inlined frame this is the inlinee's
    // callee registers offset by the inline frame's stack offset.
    unsigned numVariables;
    if (codeOrigin.inlineCallFrame)
        numVariables = baselineCodeBlockForInlineCallFrame(codeOrigin.inlineCallFrame)->m_numCalleeRegisters + codeOrigin.inlineCallFrame->stackOffset;
    else
        numVariables = baselineCodeBlock->m_numCalleeRegisters;
    
    // Crazy special case: if we're at index == 0 then this must be an argument check
    // failure, in which case all variables are already set up. The recoveries should
    // reflect this.
    if (!index) {
        valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
        for (size_t i = 0; i < valueRecoveries.size(); ++i)
            valueRecoveries[i] = ValueRecovery::alreadyInJSStack();
        return;
    }
    
    // Step 1: Find the last checkpoint, and figure out the number of virtual registers as we go.
    // NOTE(review): relies on a Reset event existing at or before index - 1;
    // otherwise startIndex would underflow. Presumably guaranteed by the
    // stream's construction — confirm against the event emitter.
    unsigned startIndex = index - 1;
    while (at(startIndex).kind() != Reset)
        startIndex--;
    
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("Computing OSR exit recoveries starting at seq#%u.\n", startIndex);
#endif
    
    // Step 2: Create a mock-up of the DFG's state and execute the events.
    Operands<ValueSource> operandSources(codeBlock->numParameters(), numVariables);
    HashMap<MinifiedID, MinifiedGenerationInfo> generationInfos;
    for (unsigned i = startIndex; i < index; ++i) {
        const VariableEvent& event = at(i);
        switch (event.kind()) {
        case Reset:
            // nothing to do.
            break;
        case BirthToFill:
        case BirthToSpill: {
            // A node's value comes alive: create its generation info.
            MinifiedGenerationInfo info;
            info.update(event);
            generationInfos.add(event.id(), info);
            break;
        }
        case Fill:
        case Spill:
        case Death: {
            // The node must already have been born above.
            HashMap<MinifiedID, MinifiedGenerationInfo>::iterator iter = generationInfos.find(event.id());
            ASSERT(iter != generationInfos.end());
            iter->value.update(event);
            break;
        }
        case MovHintEvent:
            // Operand now logically holds the value produced by this node.
            if (operandSources.hasOperand(event.operand()))
                operandSources.setOperand(event.operand(), ValueSource(event.id()));
            break;
        case SetLocalEvent:
            // Operand was flushed to the stack in the given format.
            if (operandSources.hasOperand(event.operand()))
                operandSources.setOperand(event.operand(), ValueSource::forDataFormat(event.dataFormat()));
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
    }
    
    // Step 3: Compute value recoveries!
    valueRecoveries = Operands<ValueRecovery>(codeBlock->numParameters(), numVariables);
    for (unsigned i = 0; i < operandSources.size(); ++i) {
        ValueSource& source = operandSources[i];
        if (source.isTriviallyRecoverable()) {
            valueRecoveries[i] = source.valueRecovery();
            continue;
        }
        
        ASSERT(source.kind() == HaveNode);
        MinifiedNode* node = graph.at(source.id());
        if (tryToSetConstantRecovery(valueRecoveries[i], codeBlock, node))
            continue;
        
        MinifiedGenerationInfo info = generationInfos.get(source.id());
        if (info.format == DataFormatNone) {
            // The node itself is dead at this point.
            // Try to see if there is an alternate node that would contain the value we want.
            // There are four possibilities:
            //
            // Int32ToDouble: We can use this in place of the original node, but
            //    we'd rather not; so we use it only if it is the only remaining
            //    live version.
            //
            // ValueToInt32: If the only remaining live version of the value is
            //    ValueToInt32, then we can use it.
            //
            // UInt32ToNumber: If the only live version of the value is a UInt32ToNumber
            //    then the only remaining uses are ones that want a properly formed number
            //    rather than a UInt32 intermediate.
            //
            // DoubleAsInt32: Same as UInt32ToNumber.
            //
            // The reverse of the above: This node could be a UInt32ToNumber, but its
            //    alternative is still alive. This means that the only remaining uses of
            //    the number would be fine with a UInt32 intermediate.
            
            bool found = false;
            
            // Reverse case first: this node is a conversion whose operand is
            // still alive.
            if (node && node->op() == UInt32ToNumber) {
                MinifiedID id = node->child1();
                if (tryToSetConstantRecovery(valueRecoveries[i], codeBlock, graph.at(id)))
                    continue;
                info = generationInfos.get(id);
                if (info.format != DataFormatNone)
                    found = true;
            }
            
            if (!found) {
                // Forward case: scan all live nodes for a conversion node whose
                // child is the value we need.
                MinifiedID int32ToDoubleID;
                MinifiedID valueToInt32ID;
                MinifiedID uint32ToNumberID;
                MinifiedID doubleAsInt32ID;
                
                HashMap<MinifiedID, MinifiedGenerationInfo>::iterator iter = generationInfos.begin();
                HashMap<MinifiedID, MinifiedGenerationInfo>::iterator end = generationInfos.end();
                for (; iter != end; ++iter) {
                    MinifiedID id = iter->key;
                    node = graph.at(id);
                    if (!node)
                        continue;
                    if (!node->hasChild1())
                        continue;
                    if (node->child1() != source.id())
                        continue;
                    if (iter->value.format == DataFormatNone)
                        continue;
                    switch (node->op()) {
                    case Int32ToDouble:
                    case ForwardInt32ToDouble:
                        int32ToDoubleID = id;
                        break;
                    case ValueToInt32:
                        valueToInt32ID = id;
                        break;
                    case UInt32ToNumber:
                        uint32ToNumberID = id;
                        break;
                    case DoubleAsInt32:
                        doubleAsInt32ID = id;
                        break;
                    default:
                        break;
                    }
                }
                
                // Preference order among the surviving conversions; a
                // default-constructed MinifiedID is falsy here.
                MinifiedID idToUse;
                if (!!doubleAsInt32ID)
                    idToUse = doubleAsInt32ID;
                else if (!!int32ToDoubleID)
                    idToUse = int32ToDoubleID;
                else if (!!valueToInt32ID)
                    idToUse = valueToInt32ID;
                else if (!!uint32ToNumberID)
                    idToUse = uint32ToNumberID;
                
                if (!!idToUse) {
                    info = generationInfos.get(idToUse);
                    ASSERT(info.format != DataFormatNone);
                    found = true;
                }
            }
            
            if (!found) {
                // No live version of the value exists anywhere; recover as
                // undefined.
                valueRecoveries[i] = ValueRecovery::constant(jsUndefined());
                continue;
            }
        }
        
        ASSERT(info.format != DataFormatNone);
        
        if (info.filled) {
            if (info.format == DataFormatDouble) {
                valueRecoveries[i] = ValueRecovery::inFPR(info.u.fpr);
                continue;
            }
#if USE(JSVALUE32_64)
            // On 32-bit, a boxed JSValue occupies a tag/payload register pair.
            if (info.format & DataFormatJS) {
                valueRecoveries[i] = ValueRecovery::inPair(info.u.pair.tagGPR, info.u.pair.payloadGPR);
                continue;
            }
#endif
            valueRecoveries[i] = ValueRecovery::inGPR(info.u.gpr, info.format);
            continue;
        }
        
        // Spilled: the value sits in a DFG stack slot at a known virtual register.
        valueRecoveries[i] =
            ValueRecovery::displacedInJSStack(static_cast<VirtualRegister>(info.u.virtualReg), info.format);
    }
    
    // Step 4: Make sure that for locals that coincide with true call frame headers, the exit compiler knows
    // that those values don't have to be recovered. Signal this by using ValueRecovery::alreadyInJSStack()
    for (InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame; inlineCallFrame; inlineCallFrame = inlineCallFrame->caller.inlineCallFrame) {
        for (unsigned i = JSStack::CallFrameHeaderSize; i--;)
            valueRecoveries.setLocal(inlineCallFrame->stackOffset - i - 1, ValueRecovery::alreadyInJSStack());
    }
}

} } // namespace JSC::DFG

#endif // ENABLE(DFG_JIT)