1/* 2 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved. 3 * 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions 6 * are met: 7 * 1. Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * 2. Redistributions in binary form must reproduce the above copyright 10 * notice, this list of conditions and the following disclaimer in the 11 * documentation and/or other materials provided with the distribution. 12 * 13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY 14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR 17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

#include "config.h"
#include "DFGSpeculativeJIT.h"

#if ENABLE(DFG_JIT)

#include "Arguments.h"
#include "ArrayPrototype.h"
#include "DFGCallArrayAllocatorSlowPathGenerator.h"
#include "DFGSlowPathGenerator.h"
#include "JSCJSValueInlines.h"
#include "ObjectPrototype.h"

namespace JSC { namespace DFG {

#if USE(JSVALUE64)

// Fills the operand for |edge| into a GPR when the operand is statically known
// to be an integer. On return, |returnFormat| is either DataFormatInteger (raw
// int32) or DataFormatJSInteger (int32 boxed with the tagTypeNumber tag).
GPRReg SpeculativeJIT::fillInteger(Edge edge, DataFormat& returnFormat)
{
    ASSERT(!needsTypeCheck(edge, SpecInt32));

    VirtualRegister virtualRegister = edge->virtualRegister();
    GenerationInfo& info = m_generationInfo[virtualRegister];

    if (info.registerFormat() == DataFormatNone) {
        // Not currently in a register: materialize from constant or spill slot.
        GPRReg gpr = allocate();

        if (edge->hasConstant()) {
            m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
            if (isInt32Constant(edge.node())) {
                // Raw (unboxed) int32 constant: done immediately.
                m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
                info.fillInteger(*m_stream, gpr);
                returnFormat = DataFormatInteger;
                return gpr;
            }
            if (isNumberConstant(edge.node())) {
                JSValue jsValue = jsNumber(valueOfNumberConstant(edge.node()));
                m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
            } else {
                ASSERT(isJSConstant(edge.node()));
                JSValue jsValue = valueOfJSConstant(edge.node());
                m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
            }
        } else if (info.spillFormat() == DataFormatInteger) {
            m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
            m_jit.load32(JITCompiler::payloadFor(virtualRegister), gpr);
            // Tag it, since fillInteger() is used when we want a boxed integer.
            m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
        } else {
            RELEASE_ASSERT(info.spillFormat() == DataFormatJS || info.spillFormat() == DataFormatJSInteger);
            m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
            m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
        }

        // Since we statically know that we're filling an integer, and values
        // in the JSStack are boxed, this must be DataFormatJSInteger.
        // We will check this with a jitAssert below.
        info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
        unlock(gpr);
    }

    switch (info.registerFormat()) {
    case DataFormatNone:
        // Should have filled, above.
        // (Deliberate fallthrough into the unreachable cases below.)
    case DataFormatJSDouble:
    case DataFormatDouble:
    case DataFormatJS:
    case DataFormatCell:
    case DataFormatJSCell:
    case DataFormatBoolean:
    case DataFormatJSBoolean:
    case DataFormatStorage:
        // Should only be calling this function if we know this operand to be integer.
        RELEASE_ASSERT_NOT_REACHED();

    case DataFormatJSInteger: {
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        m_jit.jitAssertIsJSInt32(gpr);
        returnFormat = DataFormatJSInteger;
        return gpr;
    }

    case DataFormatInteger: {
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        m_jit.jitAssertIsInt32(gpr);
        returnFormat = DataFormatInteger;
        return gpr;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return InvalidGPRReg;
    }
}

// Fills the operand for |edge| into a GPR as a fully boxed JSValue,
// converting from whatever format it currently occupies.
GPRReg SpeculativeJIT::fillJSValue(Edge edge)
{
    VirtualRegister virtualRegister = edge->virtualRegister();
    GenerationInfo& info = m_generationInfo[virtualRegister];

    switch (info.registerFormat()) {
    case DataFormatNone: {
        GPRReg gpr = allocate();

        if (edge->hasConstant()) {
            if (isInt32Constant(edge.node())) {
                info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
                JSValue jsValue = jsNumber(valueOfInt32Constant(edge.node()));
                m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
            } else if
            (isNumberConstant(edge.node())) {
                info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
                JSValue jsValue(JSValue::EncodeAsDouble, valueOfNumberConstant(edge.node()));
                m_jit.move(MacroAssembler::Imm64(JSValue::encode(jsValue)), gpr);
            } else {
                ASSERT(isJSConstant(edge.node()));
                JSValue jsValue = valueOfJSConstant(edge.node());
                m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
                info.fillJSValue(*m_stream, gpr, DataFormatJS);
            }

            m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
        } else {
            // Fill from the spill slot, boxing as needed.
            DataFormat spillFormat = info.spillFormat();
            m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
            if (spillFormat == DataFormatInteger) {
                m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
                // Box the int32 by or'ing in the tagTypeNumber tag.
                m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
                spillFormat = DataFormatJSInteger;
            } else {
                m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
                if (spillFormat == DataFormatDouble) {
                    // Need to box the double, since we want a JSValue.
                    m_jit.sub64(GPRInfo::tagTypeNumberRegister, gpr);
                    spillFormat = DataFormatJSDouble;
                } else
                    RELEASE_ASSERT(spillFormat & DataFormatJS);
            }
            info.fillJSValue(*m_stream, gpr, spillFormat);
        }
        return gpr;
    }

    case DataFormatInteger: {
        GPRReg gpr = info.gpr();
        // If the register has already been locked we need to take a copy.
        // If not, we'll zero extend in place, so mark on the info that this is now type DataFormatInteger, not DataFormatJSInteger.
        if (m_gprs.isLocked(gpr)) {
            GPRReg result = allocate();
            m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr, result);
            return result;
        }
        m_gprs.lock(gpr);
        m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr);
        info.fillJSValue(*m_stream, gpr, DataFormatJSInteger);
        return gpr;
    }

    case DataFormatDouble: {
        // Box the double into a GPR; the FPR is released after boxing.
        FPRReg fpr = info.fpr();
        GPRReg gpr = boxDouble(fpr);

        // Update all info
        info.fillJSValue(*m_stream, gpr, DataFormatJSDouble);
        m_fprs.release(fpr);
        m_gprs.retain(gpr, virtualRegister, SpillOrderJS);

        return gpr;
    }

    case DataFormatCell:
        // No retag required on JSVALUE64!
    case DataFormatJS:
    case DataFormatJSInteger:
    case DataFormatJSDouble:
    case DataFormatJSCell:
    case DataFormatJSBoolean: {
        // Already a valid boxed JSValue in a register; just lock and return it.
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        return gpr;
    }

    case DataFormatBoolean:
    case DataFormatStorage:
        // this type currently never occurs
        RELEASE_ASSERT_NOT_REACHED();

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return InvalidGPRReg;
    }
}

// Boxes a uint32 value (held as a raw int32 register) into a JS number without
// speculation: non-negative values are tagged directly as JSInteger; values
// with the sign bit set are converted to double and 2^32 is added to recover
// the unsigned magnitude before boxing.
void SpeculativeJIT::nonSpeculativeUInt32ToNumber(Node* node)
{
    IntegerOperand op1(this, node->child1());
    FPRTemporary boxer(this);
    GPRTemporary result(this, op1);

    JITCompiler::Jump positive = m_jit.branch32(MacroAssembler::GreaterThanOrEqual, op1.gpr(), TrustedImm32(0));

    // Negative bit pattern: reinterpret as unsigned via double arithmetic.
    m_jit.convertInt32ToDouble(op1.gpr(), boxer.fpr());
    m_jit.addDouble(JITCompiler::AbsoluteAddress(&AssemblyHelpers::twoToThe32), boxer.fpr());

    boxDouble(boxer.fpr(), result.gpr());

    JITCompiler::Jump done = m_jit.jump();

    positive.link(&m_jit);

    // Fits in int32: just tag it.
    m_jit.or64(GPRInfo::tagTypeNumberRegister, op1.gpr(), result.gpr());

    done.link(&m_jit);

    jsValueResult(result.gpr(), m_currentNode);
}

// Emits a patchable inline cache for get_by_id (body continues below).
void SpeculativeJIT::cachedGetById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg resultGPR, unsigned identifierNumber, JITCompiler::Jump slowPathTarget,
    SpillRegistersMode spillMode)
{
    // Patchable structure check: the structure pointer to compare against is
    // patched in later by the inline cache machinery (starts as unusedPointer).
    JITCompiler::DataLabelPtr structureToCompare;
    JITCompiler::PatchableJump structureCheck = m_jit.patchableBranchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(unusedPointer)));

    // Load the butterfly (out-of-line property storage); this load is
    // "convertible" so the IC can rewrite it for inline-storage properties.
    JITCompiler::ConvertibleLoadLabel propertyStorageLoad =
        m_jit.convertibleLoadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR);
    // Patchable load of the property itself; offset is patched by the IC.
    JITCompiler::DataLabelCompact loadWithPatch = m_jit.load64WithCompactAddressOffsetPatch(JITCompiler::Address(resultGPR, 0), resultGPR);

    JITCompiler::Label doneLabel = m_jit.label();

    // Slow path: call the optimizing get_by_id operation when the structure
    // check fails (plus any caller-provided extra slow case).
    OwnPtr<SlowPathGenerator> slowPath;
    if (!slowPathTarget.isSet()) {
        slowPath = slowPathCall(
            structureCheck.m_jump, this, operationGetByIdOptimize, resultGPR, baseGPR,
            identifier(identifierNumber), spillMode);
    } else {
        JITCompiler::JumpList slowCases;
        slowCases.append(structureCheck.m_jump);
        slowCases.append(slowPathTarget);
        slowPath = slowPathCall(
            slowCases, this, operationGetByIdOptimize, resultGPR, baseGPR,
            identifier(identifierNumber), spillMode);
    }
    m_jit.addPropertyAccess(
        PropertyAccessRecord(
            codeOrigin, structureToCompare, structureCheck, propertyStorageLoad, loadWithPatch,
            slowPath.get(), doneLabel, safeCast<int8_t>(baseGPR), safeCast<int8_t>(resultGPR),
            usedRegisters(),
            spillMode == NeedToSpill ? PropertyAccessRecord::RegistersInUse : PropertyAccessRecord::RegistersFlushed));
    addSlowPathGenerator(slowPath.release());
}

// Emits a patchable inline cache for put_by_id: structure check, write
// barrier, and a patchable store into the property storage, with a slow path
// that dispatches on strict mode and direct-vs-normal put semantics.
void SpeculativeJIT::cachedPutById(CodeOrigin codeOrigin, GPRReg baseGPR, GPRReg valueGPR, Edge valueUse, GPRReg scratchGPR, unsigned identifierNumber, PutKind putKind, JITCompiler::Jump slowPathTarget)
{

    JITCompiler::DataLabelPtr structureToCompare;
    JITCompiler::PatchableJump structureCheck = m_jit.patchableBranchPtrWithPatch(JITCompiler::NotEqual, JITCompiler::Address(baseGPR, JSCell::structureOffset()), structureToCompare, JITCompiler::TrustedImmPtr(reinterpret_cast<void*>(unusedPointer)));

    writeBarrier(baseGPR, valueGPR, valueUse, WriteBarrierForPropertyAccess, scratchGPR);

    JITCompiler::ConvertibleLoadLabel propertyStorageLoad =
        m_jit.convertibleLoadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), scratchGPR);
    // Patchable store; the offset is filled in when the IC is linked.
    JITCompiler::DataLabel32 storeWithPatch = m_jit.store64WithAddressOffsetPatch(valueGPR, JITCompiler::Address(scratchGPR, 0));

    JITCompiler::Label doneLabel = m_jit.label();

    // Pick the slow-path operation matching (strict mode) x (direct put).
    V_DFGOperation_EJCI optimizedCall;
    if (m_jit.strictModeFor(m_currentNode->codeOrigin)) {
        if (putKind == Direct)
            optimizedCall = operationPutByIdDirectStrictOptimize;
        else
            optimizedCall = operationPutByIdStrictOptimize;
    } else {
        if (putKind == Direct)
            optimizedCall = operationPutByIdDirectNonStrictOptimize;
        else
            optimizedCall = operationPutByIdNonStrictOptimize;
    }
    OwnPtr<SlowPathGenerator> slowPath;
    if (!slowPathTarget.isSet()) {
        slowPath = slowPathCall(
            structureCheck.m_jump, this, optimizedCall, NoResult, valueGPR, baseGPR,
            identifier(identifierNumber));
    } else {
        JITCompiler::JumpList slowCases;
        slowCases.append(structureCheck.m_jump);
        slowCases.append(slowPathTarget);
        slowPath = slowPathCall(
            slowCases, this, optimizedCall, NoResult, valueGPR, baseGPR,
            identifier(identifierNumber));
    }
    // Record the registers live across this IC; scratchGPR is excluded since
    // the IC is free to clobber it.
    RegisterSet currentlyUsedRegisters = usedRegisters();
    currentlyUsedRegisters.clear(scratchGPR);
    ASSERT(currentlyUsedRegisters.get(baseGPR));
    ASSERT(currentlyUsedRegisters.get(valueGPR));
    m_jit.addPropertyAccess(
        PropertyAccessRecord(
            codeOrigin, structureToCompare, structureCheck, propertyStorageLoad,
            JITCompiler::DataLabelCompact(storeWithPatch.label()), slowPath.get(), doneLabel,
            safeCast<int8_t>(baseGPR), safeCast<int8_t>(valueGPR), currentlyUsedRegisters));
    addSlowPathGenerator(slowPath.release());
}

// Compares |operand| against null/undefined and produces a boxed boolean.
// |invert| flips the sense of the comparison. Handles objects that
// masquerade as undefined, guarded by the global object's watchpoint.
void SpeculativeJIT::nonSpeculativeNonPeepholeCompareNull(Edge operand, bool invert)
{
    JSValueOperand arg(this, operand);
    GPRReg argGPR = arg.gpr();

    GPRTemporary result(this, arg);
    GPRReg resultGPR = result.gpr();

    JITCompiler::Jump notCell;

    JITCompiler::Jump notMasqueradesAsUndefined;
    if (m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
        // Watchpoint valid: no object masquerades as undefined, so any cell
        // compares unequal to null/undefined. Register the watchpoint so this
        // code deoptimizes if that ever changes.
        if (!isKnownCell(operand.node()))
            notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);

        m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        m_jit.move(invert ? TrustedImm32(1) : TrustedImm32(0), resultGPR);
        notMasqueradesAsUndefined = m_jit.jump();
    } else {
        GPRTemporary localGlobalObject(this);
        GPRTemporary remoteGlobalObject(this);

        if (!isKnownCell(operand.node()))
            notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);

        // Check the MasqueradesAsUndefined bit in the structure's type info.
        m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
        JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::NonZero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined));

        m_jit.move(invert ? TrustedImm32(1) : TrustedImm32(0), resultGPR);
        notMasqueradesAsUndefined = m_jit.jump();

        isMasqueradesAsUndefined.link(&m_jit);
        // A masquerader equals undefined only when observed from its own
        // global object; compare the structure's global object with ours.
        GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
        GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
        m_jit.move(JITCompiler::TrustedImmPtr(m_jit.graph().globalObjectFor(operand->codeOrigin)), localGlobalObjectGPR);
        m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR);
        m_jit.comparePtr(invert ? JITCompiler::NotEqual : JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, resultGPR);
    }

    if (!isKnownCell(operand.node())) {
        JITCompiler::Jump done = m_jit.jump();

        notCell.link(&m_jit);

        // Non-cell: mask off the undefined tag bit so both null and undefined
        // compare equal to ValueNull.
        m_jit.move(argGPR, resultGPR);
        m_jit.and64(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
        m_jit.compare64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm32(ValueNull), resultGPR);

        done.link(&m_jit);
    }

    notMasqueradesAsUndefined.link(&m_jit);

    // Box the 0/1 comparison result as a JS boolean.
    m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
    jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
}

// Fused compare-to-null + branch (peephole form of the above): branches to
// the taken/notTaken blocks directly instead of materializing a boolean.
void SpeculativeJIT::nonSpeculativePeepholeBranchNull(Edge operand, Node* branchNode, bool invert)
{
    BlockIndex taken = branchNode->takenBlockIndex();
    BlockIndex notTaken = branchNode->notTakenBlockIndex();

    // If the taken block is next in emission order, invert so we fall through.
    if (taken == nextBlock()) {
        invert = !invert;
        BlockIndex tmp = taken;
        taken = notTaken;
        notTaken = tmp;
    }

    JSValueOperand arg(this, operand);
    GPRReg argGPR = arg.gpr();

    GPRTemporary result(this, arg);
    GPRReg resultGPR = result.gpr();

    JITCompiler::Jump notCell;

    if (m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
        if (!isKnownCell(operand.node()))
            notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);

        // Watchpoint valid: cells can never equal null/undefined here.
        m_jit.graph().globalObjectFor(operand->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        jump(invert ? taken : notTaken, ForceJump);
    } else {
        GPRTemporary localGlobalObject(this);
        GPRTemporary remoteGlobalObject(this);

        if (!isKnownCell(operand.node()))
            notCell = m_jit.branchTest64(MacroAssembler::NonZero, argGPR, GPRInfo::tagMaskRegister);

        // Non-masquerading cell: never equal to null/undefined.
        m_jit.loadPtr(JITCompiler::Address(argGPR, JSCell::structureOffset()), resultGPR);
        branchTest8(JITCompiler::Zero, JITCompiler::Address(resultGPR, Structure::typeInfoFlagsOffset()), JITCompiler::TrustedImm32(MasqueradesAsUndefined), invert ? taken : notTaken);

        // Masquerader counts as undefined only for its own global object.
        GPRReg localGlobalObjectGPR = localGlobalObject.gpr();
        GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr();
        m_jit.move(TrustedImmPtr(m_jit.graph().globalObjectFor(operand->codeOrigin)), localGlobalObjectGPR);
        m_jit.loadPtr(JITCompiler::Address(resultGPR, Structure::globalObjectOffset()), remoteGlobalObjectGPR);
        branchPtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, invert ? notTaken : taken);
    }

    if (!isKnownCell(operand.node())) {
        jump(notTaken, ForceJump);

        notCell.link(&m_jit);

        // Non-cell: mask the undefined tag bit and compare against ValueNull,
        // so both null and undefined take the branch.
        m_jit.move(argGPR, resultGPR);
        m_jit.and64(JITCompiler::TrustedImm32(~TagBitUndefined), resultGPR);
        branch64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, resultGPR, JITCompiler::TrustedImm64(ValueNull), taken);
    }

    jump(notTaken);
}

// Compiles a compare-to-null node, fusing it with an immediately following
// Branch when the peephole pattern applies. Returns true if the branch was
// consumed (caller should skip it).
bool SpeculativeJIT::nonSpeculativeCompareNull(Node* node, Edge operand, bool invert)
{
    unsigned branchIndexInBlock = detectPeepHoleBranch();
    if (branchIndexInBlock != UINT_MAX) {
        Node* branchNode = m_jit.graph().m_blocks[m_block]->at(branchIndexInBlock);

        RELEASE_ASSERT(node->adjustedRefCount() == 1);

        nonSpeculativePeepholeBranchNull(operand, branchNode, invert);

        use(node->child1());
        use(node->child2());
        m_indexInBlock = branchIndexInBlock;
        m_currentNode = branchNode;

        return true;
    }

    nonSpeculativeNonPeepholeCompareNull(operand, invert);

    return false;
}

// Fused compare + branch for relational comparisons, with an integer fast
// path and a call to |helperFunction| for the general case.
void SpeculativeJIT::nonSpeculativePeepholeBranch(Node* node, Node* branchNode, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
    BlockIndex taken = branchNode->takenBlockIndex();
    BlockIndex notTaken = branchNode->notTakenBlockIndex();

    JITCompiler::ResultCondition callResultCondition = JITCompiler::NonZero;

    // The branch instruction will branch to the taken block.
    // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
    if (taken == nextBlock()) {
        cond = JITCompiler::invert(cond);
        callResultCondition = JITCompiler::Zero;
        BlockIndex tmp = taken;
        taken = notTaken;
        notTaken = tmp;
    }

    JSValueOperand arg1(this, node->child1());
    JSValueOperand arg2(this, node->child2());
    GPRReg arg1GPR = arg1.gpr();
    GPRReg arg2GPR = arg2.gpr();

    JITCompiler::JumpList slowPath;

    if (isKnownNotInteger(node->child1().node()) || isKnownNotInteger(node->child2().node())) {
        // At least one operand is known non-integer: no fast path, always call.
        GPRResult result(this);
        GPRReg resultGPR = result.gpr();

        arg1.use();
        arg2.use();

        flushRegisters();
        callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);

        branchTest32(callResultCondition, resultGPR, taken);
    } else {
        GPRTemporary result(this, arg2);
        GPRReg resultGPR = result.gpr();

        arg1.use();
        arg2.use();

        // Boxed int32s are >= tagTypeNumberRegister; anything Below it is not
        // a JSInteger and goes to the slow path.
        if (!isKnownInteger(node->child1().node()))
            slowPath.append(m_jit.branch64(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
        if (!isKnownInteger(node->child2().node()))
            slowPath.append(m_jit.branch64(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));

        // Fast path: 32-bit compare of the untagged payloads.
        branch32(cond, arg1GPR, arg2GPR, taken);

        if (!isKnownInteger(node->child1().node()) || !isKnownInteger(node->child2().node())) {
            jump(notTaken, ForceJump);

            slowPath.link(&m_jit);

            silentSpillAllRegisters(resultGPR);
            callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);
            silentFillAllRegisters(resultGPR);

            branchTest32(callResultCondition, resultGPR, taken);
        }
    }

    jump(notTaken);

    // The peephole consumed the rest of the block up to the branch.
    m_indexInBlock = m_jit.graph().m_blocks[m_block]->size() - 1;
    m_currentNode = branchNode;
}

// Slow path for compare nodes that produce a value: calls the comparison
// operation, then boxes the 0/1 result as a JS boolean in the result GPR.
template<typename JumpType>
class CompareAndBoxBooleanSlowPathGenerator
    : public CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg> {
public:
    CompareAndBoxBooleanSlowPathGenerator(
        JumpType from, SpeculativeJIT* jit,
        S_DFGOperation_EJJ function, GPRReg result, GPRReg arg1, GPRReg arg2)
        : CallSlowPathGenerator<JumpType, S_DFGOperation_EJJ, GPRReg>(
            from, jit, function, NeedToSpill, result)
        , m_arg1(arg1)
        , m_arg2(arg2)
    {
    }

protected:
    virtual void generateInternal(SpeculativeJIT* jit)
    {
        this->setUp(jit);
        this->recordCall(jit->callOperation(this->m_function, this->m_result, m_arg1, m_arg2));
        // Reduce the operation's result to bit 0, then box as a JS boolean.
        jit->m_jit.and32(JITCompiler::TrustedImm32(1), this->m_result);
        jit->m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), this->m_result);
        this->tearDown(jit);
    }

private:
    GPRReg m_arg1;
    GPRReg m_arg2;
};

// Non-fused relational compare producing a boxed boolean, with an integer
// fast path and the boxing slow path generator above for the general case.
void SpeculativeJIT::nonSpeculativeNonPeepholeCompare(Node* node, MacroAssembler::RelationalCondition cond, S_DFGOperation_EJJ helperFunction)
{
    JSValueOperand arg1(this, node->child1());
    JSValueOperand arg2(this, node->child2());
    GPRReg arg1GPR = arg1.gpr();
    GPRReg arg2GPR = arg2.gpr();

    JITCompiler::JumpList slowPath;

    if (isKnownNotInteger(node->child1().node()) || isKnownNotInteger(node->child2().node())) {
        GPRResult result(this);
        GPRReg resultGPR = result.gpr();

        arg1.use();
        arg2.use();

        flushRegisters();
        callOperation(helperFunction, resultGPR, arg1GPR, arg2GPR);

        m_jit.or32(TrustedImm32(ValueFalse), resultGPR);
        jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
    } else {
        GPRTemporary result(this, arg2);
        GPRReg resultGPR = result.gpr();

        arg1.use();
        arg2.use();

        if (!isKnownInteger(node->child1().node()))
            slowPath.append(m_jit.branch64(MacroAssembler::Below, arg1GPR, GPRInfo::tagTypeNumberRegister));
        if (!isKnownInteger(node->child2().node()))
            slowPath.append(m_jit.branch64(MacroAssembler::Below, arg2GPR, GPRInfo::tagTypeNumberRegister));

        m_jit.compare32(cond, arg1GPR, arg2GPR, resultGPR);
        m_jit.or32(TrustedImm32(ValueFalse), resultGPR);

        if (!isKnownInteger(node->child1().node()) || !isKnownInteger(node->child2().node())) {
            addSlowPathGenerator(adoptPtr(
                new CompareAndBoxBooleanSlowPathGenerator<JITCompiler::JumpList>(
                    slowPath, this, helperFunction, resultGPR, arg1GPR, arg2GPR)));
        }

        jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
    }
}

// Fused strict-equality compare + branch. Fast paths: identical cells, and
// non-cell/non-double values compared bitwise; doubles and mixed cases call
// out to operationCompareStrictEq.
void SpeculativeJIT::nonSpeculativePeepholeStrictEq(Node* node, Node* branchNode, bool invert)
{
    BlockIndex taken = branchNode->takenBlockIndex();
    BlockIndex notTaken = branchNode->notTakenBlockIndex();

    // The branch instruction will branch to the taken block.
    // If taken is next, switch taken with notTaken & invert the branch condition so we can fall through.
    if (taken == nextBlock()) {
        invert = !invert;
        BlockIndex tmp = taken;
        taken = notTaken;
        notTaken = tmp;
    }

    JSValueOperand arg1(this, node->child1());
    JSValueOperand arg2(this, node->child2());
    GPRReg arg1GPR = arg1.gpr();
    GPRReg arg2GPR = arg2.gpr();

    GPRTemporary result(this);
    GPRReg resultGPR = result.gpr();

    arg1.use();
    arg2.use();

    if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) {
        // see if we get lucky: if the arguments are cells and they reference the same
        // cell, then they must be strictly equal.
        branch64(JITCompiler::Equal, arg1GPR, arg2GPR, invert ? notTaken : taken);

        silentSpillAllRegisters(resultGPR);
        callOperation(operationCompareStrictEqCell, resultGPR, arg1GPR, arg2GPR);
        silentFillAllRegisters(resultGPR);

        branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR, taken);
    } else {
        m_jit.or64(arg1GPR, arg2GPR, resultGPR);

        // Both operands are cells iff neither has any tag bits set.
        JITCompiler::Jump twoCellsCase = m_jit.branchTest64(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);

        // Route boxed doubles (numbers that are not JSIntegers) to the call.
        JITCompiler::Jump leftOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg1GPR, GPRInfo::tagTypeNumberRegister);
        JITCompiler::Jump leftDouble = m_jit.branchTest64(JITCompiler::NonZero, arg1GPR, GPRInfo::tagTypeNumberRegister);
        leftOK.link(&m_jit);
        JITCompiler::Jump rightOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg2GPR, GPRInfo::tagTypeNumberRegister);
        JITCompiler::Jump rightDouble = m_jit.branchTest64(JITCompiler::NonZero, arg2GPR, GPRInfo::tagTypeNumberRegister);
        rightOK.link(&m_jit);

        // Neither operand is a double: bitwise equality is strict equality.
        branch64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, taken);
        jump(notTaken, ForceJump);

        twoCellsCase.link(&m_jit);
        // Identical cells are strictly equal; different cells need the call.
        branch64(JITCompiler::Equal, arg1GPR, arg2GPR, invert ? notTaken : taken);

        leftDouble.link(&m_jit);
        rightDouble.link(&m_jit);

        silentSpillAllRegisters(resultGPR);
        callOperation(operationCompareStrictEq, resultGPR, arg1GPR, arg2GPR);
        silentFillAllRegisters(resultGPR);

        branchTest32(invert ? JITCompiler::Zero : JITCompiler::NonZero, resultGPR, taken);
    }

    jump(notTaken);
}

// Non-fused strict equality producing a boxed boolean; same fast paths as the
// peephole form above.
void SpeculativeJIT::nonSpeculativeNonPeepholeStrictEq(Node* node, bool invert)
{
    JSValueOperand arg1(this, node->child1());
    JSValueOperand arg2(this, node->child2());
    GPRReg arg1GPR = arg1.gpr();
    GPRReg arg2GPR = arg2.gpr();

    GPRTemporary result(this);
    GPRReg resultGPR = result.gpr();

    arg1.use();
    arg2.use();

    if (isKnownCell(node->child1().node()) && isKnownCell(node->child2().node())) {
        // see if we get lucky: if the arguments are cells and they reference the same
        // cell, then they must be strictly equal.
        // FIXME: this should flush registers instead of silent spill/fill.
        JITCompiler::Jump notEqualCase = m_jit.branch64(JITCompiler::NotEqual, arg1GPR, arg2GPR);

        // Same cell: result is trivially (!invert).
        m_jit.move(JITCompiler::TrustedImm64(JSValue::encode(jsBoolean(!invert))), resultGPR);

        JITCompiler::Jump done = m_jit.jump();

        notEqualCase.link(&m_jit);

        silentSpillAllRegisters(resultGPR);
        callOperation(operationCompareStrictEqCell, resultGPR, arg1GPR, arg2GPR);
        silentFillAllRegisters(resultGPR);

        // Box the operation's 0/1 result as a JS boolean.
        m_jit.and64(JITCompiler::TrustedImm32(1), resultGPR);
        m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);

        done.link(&m_jit);
    } else {
        m_jit.or64(arg1GPR, arg2GPR, resultGPR);

        JITCompiler::JumpList slowPathCases;

        // Both operands are cells iff neither has any tag bits set.
        JITCompiler::Jump twoCellsCase = m_jit.branchTest64(JITCompiler::Zero, resultGPR, GPRInfo::tagMaskRegister);

        // Boxed doubles take the slow path.
        JITCompiler::Jump leftOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg1GPR, GPRInfo::tagTypeNumberRegister);
        slowPathCases.append(m_jit.branchTest64(JITCompiler::NonZero, arg1GPR, GPRInfo::tagTypeNumberRegister));
        leftOK.link(&m_jit);
        JITCompiler::Jump rightOK = m_jit.branch64(JITCompiler::AboveOrEqual, arg2GPR, GPRInfo::tagTypeNumberRegister);
        slowPathCases.append(m_jit.branchTest64(JITCompiler::NonZero, arg2GPR, GPRInfo::tagTypeNumberRegister));
        rightOK.link(&m_jit);

        // Neither operand is a double: compare bitwise, then box the boolean.
        m_jit.compare64(invert ? JITCompiler::NotEqual : JITCompiler::Equal, arg1GPR, arg2GPR, resultGPR);
        m_jit.or32(JITCompiler::TrustedImm32(ValueFalse), resultGPR);

        JITCompiler::Jump done = m_jit.jump();

        twoCellsCase.link(&m_jit);
        slowPathCases.append(m_jit.branch64(JITCompiler::NotEqual, arg1GPR, arg2GPR));

        m_jit.move(JITCompiler::TrustedImm64(JSValue::encode(jsBoolean(!invert))), resultGPR);

        addSlowPathGenerator(
            adoptPtr(
                new CompareAndBoxBooleanSlowPathGenerator<MacroAssembler::JumpList>(
                    slowPathCases, this, operationCompareStrictEq, resultGPR, arg1GPR,
                    arg2GPR)));

        done.link(&m_jit);
    }

    jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean, UseChildrenCalledExplicitly);
}

// Emits a Call or Construct: sets up the callee frame, performs a patchable
// fast call guarded by a callee check, with a slow path through the call link
// stub (via nonArgGPR0).
void SpeculativeJIT::emitCall(Node* node)
{
    if (node->op() != Call)
        RELEASE_ASSERT(node->op() == Construct);

    // For constructors, the this argument is not passed but we have to make space
    // for it.
    int dummyThisArgument = node->op() == Call ? 0 : 1;

    CallLinkInfo::CallType callType = node->op() == Call ? CallLinkInfo::Call : CallLinkInfo::Construct;

    Edge calleeEdge = m_jit.graph().m_varArgChildren[node->firstChild()];
    JSValueOperand callee(this, calleeEdge);
    GPRReg calleeGPR = callee.gpr();
    use(calleeEdge);

    // The call instruction's first child is the function; the subsequent children are the
    // arguments.
    int numPassedArgs = node->numChildren() - 1;

    // Populate the outgoing call frame header and argument slots.
    m_jit.store32(MacroAssembler::TrustedImm32(numPassedArgs + dummyThisArgument), callFramePayloadSlot(JSStack::ArgumentCount));
    m_jit.store64(GPRInfo::callFrameRegister, callFrameSlot(JSStack::CallerFrame));
    m_jit.store64(calleeGPR, callFrameSlot(JSStack::Callee));

    for (int i = 0; i < numPassedArgs; i++) {
        Edge argEdge = m_jit.graph().m_varArgChildren[node->firstChild() + 1 + i];
        JSValueOperand arg(this, argEdge);
        GPRReg argGPR = arg.gpr();
        use(argEdge);

        m_jit.store64(argGPR, argumentSlot(i + dummyThisArgument));
    }

    flushRegisters();

    GPRResult result(this);
    GPRReg resultGPR = result.gpr();

    JITCompiler::DataLabelPtr targetToCheck;
    JITCompiler::JumpList slowPath;

    CallBeginToken token;
    m_jit.beginCall(node->codeOrigin, token);

    // Advance the frame pointer past this code block's registers.
    m_jit.addPtr(TrustedImm32(m_jit.codeBlock()->m_numCalleeRegisters * sizeof(Register)), GPRInfo::callFrameRegister);

    // Patchable callee check; starts unlinked (null) so the first call always
    // takes the slow path, which links the fast call.
    slowPath.append(m_jit.branchPtrWithPatch(MacroAssembler::NotEqual, calleeGPR, targetToCheck, MacroAssembler::TrustedImmPtr(0)));

    // Install the callee's scope chain into the new frame.
    m_jit.loadPtr(MacroAssembler::Address(calleeGPR, OBJECT_OFFSETOF(JSFunction, m_scope)), resultGPR);
    m_jit.store64(resultGPR, MacroAssembler::Address(GPRInfo::callFrameRegister, static_cast<ptrdiff_t>(sizeof(Register)) * JSStack::ScopeChain));

    CodeOrigin codeOrigin = m_currentNode->codeOrigin;
    JITCompiler::Call fastCall = m_jit.nearCall();
    m_jit.notifyCall(fastCall, codeOrigin, token);

    JITCompiler::Jump done = m_jit.jump();

    slowPath.link(&m_jit);

    // Slow path: hand the callee to the call link stub in nonArgGPR0.
    m_jit.move(calleeGPR, GPRInfo::nonArgGPR0);
    m_jit.prepareForExceptionCheck();
    JITCompiler::Call slowCall = m_jit.nearCall();
    m_jit.notifyCall(slowCall, codeOrigin, token);

    done.link(&m_jit);

    m_jit.move(GPRInfo::returnValueGPR, resultGPR);

    jsValueResult(resultGPR, m_currentNode, DataFormatJS, UseChildrenCalledExplicitly);

    m_jit.addJSCall(fastCall, slowCall, targetToCheck, callType, calleeGPR, m_currentNode->codeOrigin);
}

// Fills |edge| as a speculated int32. When |strict| is true the result is
// always a raw int32 (DataFormatInteger); otherwise a boxed JSInteger may be
// returned. Speculation failure terminates speculative execution or emits a
// speculation check, as appropriate.
template<bool strict>
GPRReg SpeculativeJIT::fillSpeculateIntInternal(Edge edge, DataFormat& returnFormat)
{
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("SpecInt@%d   ", edge->index());
#endif
    AbstractValue& value = m_state.forNode(edge);
    SpeculatedType type = value.m_type;
    ASSERT(edge.useKind() != KnownInt32Use || !(value.m_type & ~SpecInt32));
    value.filter(SpecInt32);
    VirtualRegister virtualRegister = edge->virtualRegister();
    GenerationInfo& info = m_generationInfo[virtualRegister];

    switch (info.registerFormat()) {
    case DataFormatNone: {
        // A non-int32 constant or a double spill can never satisfy the
        // speculation; bail out of speculative execution.
        if ((edge->hasConstant() && !isInt32Constant(edge.node())) || info.spillFormat() == DataFormatDouble) {
            terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
            returnFormat = DataFormatInteger;
            return allocate();
        }

        GPRReg gpr = allocate();

        if (edge->hasConstant()) {
            m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
            ASSERT(isInt32Constant(edge.node()));
            m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr);
            info.fillInteger(*m_stream, gpr);
            returnFormat = DataFormatInteger;
            return gpr;
        }

        DataFormat spillFormat = info.spillFormat();

        RELEASE_ASSERT((spillFormat & DataFormatJS) || spillFormat == DataFormatInteger);

        m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);

        if (spillFormat == DataFormatJSInteger || spillFormat == DataFormatInteger) {
            // If we know this was spilled as an integer we can fill without checking.
854 if (strict) { 855 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr); 856 info.fillInteger(*m_stream, gpr); 857 returnFormat = DataFormatInteger; 858 return gpr; 859 } 860 if (spillFormat == DataFormatInteger) { 861 m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr); 862 m_jit.or64(GPRInfo::tagTypeNumberRegister, gpr); 863 } else 864 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr); 865 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger); 866 returnFormat = DataFormatJSInteger; 867 return gpr; 868 } 869 m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr); 870 871 // Fill as JSValue, and fall through. 872 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger); 873 m_gprs.unlock(gpr); 874 } 875 876 case DataFormatJS: { 877 // Check the value is an integer. 878 GPRReg gpr = info.gpr(); 879 m_gprs.lock(gpr); 880 if (type & ~SpecInt32) 881 speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branch64(MacroAssembler::Below, gpr, GPRInfo::tagTypeNumberRegister)); 882 info.fillJSValue(*m_stream, gpr, DataFormatJSInteger); 883 // If !strict we're done, return. 884 if (!strict) { 885 returnFormat = DataFormatJSInteger; 886 return gpr; 887 } 888 // else fall through & handle as DataFormatJSInteger. 889 m_gprs.unlock(gpr); 890 } 891 892 case DataFormatJSInteger: { 893 // In a strict fill we need to strip off the value tag. 894 if (strict) { 895 GPRReg gpr = info.gpr(); 896 GPRReg result; 897 // If the register has already been locked we need to take a copy. 898 // If not, we'll zero extend in place, so mark on the info that this is now type DataFormatInteger, not DataFormatJSInteger. 
899 if (m_gprs.isLocked(gpr)) 900 result = allocate(); 901 else { 902 m_gprs.lock(gpr); 903 info.fillInteger(*m_stream, gpr); 904 result = gpr; 905 } 906 m_jit.zeroExtend32ToPtr(gpr, result); 907 returnFormat = DataFormatInteger; 908 return result; 909 } 910 911 GPRReg gpr = info.gpr(); 912 m_gprs.lock(gpr); 913 returnFormat = DataFormatJSInteger; 914 return gpr; 915 } 916 917 case DataFormatInteger: { 918 GPRReg gpr = info.gpr(); 919 m_gprs.lock(gpr); 920 returnFormat = DataFormatInteger; 921 return gpr; 922 } 923 924 case DataFormatDouble: 925 case DataFormatJSDouble: { 926 if (edge->hasConstant() && isInt32Constant(edge.node())) { 927 GPRReg gpr = allocate(); 928 ASSERT(isInt32Constant(edge.node())); 929 m_jit.move(MacroAssembler::Imm32(valueOfInt32Constant(edge.node())), gpr); 930 returnFormat = DataFormatInteger; 931 return gpr; 932 } 933 } 934 case DataFormatCell: 935 case DataFormatBoolean: 936 case DataFormatJSCell: 937 case DataFormatJSBoolean: { 938 terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0); 939 returnFormat = DataFormatInteger; 940 return allocate(); 941 } 942 943 case DataFormatStorage: 944 RELEASE_ASSERT_NOT_REACHED(); 945 946 default: 947 RELEASE_ASSERT_NOT_REACHED(); 948 return InvalidGPRReg; 949 } 950} 951 952GPRReg SpeculativeJIT::fillSpeculateInt(Edge edge, DataFormat& returnFormat) 953{ 954 return fillSpeculateIntInternal<false>(edge, returnFormat); 955} 956 957GPRReg SpeculativeJIT::fillSpeculateIntStrict(Edge edge) 958{ 959 DataFormat mustBeDataFormatInteger; 960 GPRReg result = fillSpeculateIntInternal<true>(edge, mustBeDataFormatInteger); 961 RELEASE_ASSERT(mustBeDataFormatInteger == DataFormatInteger); 962 return result; 963} 964 965FPRReg SpeculativeJIT::fillSpeculateDouble(Edge edge) 966{ 967#if DFG_ENABLE(DEBUG_VERBOSE) 968 dataLogF("SpecDouble@%d ", edge->index()); 969#endif 970 AbstractValue& value = m_state.forNode(edge); 971 SpeculatedType type = value.m_type; 972 ASSERT(edge.useKind() != KnownNumberUse || 
        !(value.m_type & ~SpecNumber));
    value.filter(SpecNumber);
    VirtualRegister virtualRegister = edge->virtualRegister();
    GenerationInfo& info = m_generationInfo[virtualRegister];

    if (info.registerFormat() == DataFormatNone) {
        if (edge->hasConstant()) {
            // Constants are materialized via a GPR and moved into an FPR.
            GPRReg gpr = allocate();

            if (isInt32Constant(edge.node())) {
                FPRReg fpr = fprAllocate();
                m_jit.move(MacroAssembler::Imm64(reinterpretDoubleToInt64(static_cast<double>(valueOfInt32Constant(edge.node())))), gpr);
                m_jit.move64ToDouble(gpr, fpr);
                unlock(gpr);

                // Note: unlike the number-constant path below, this result is
                // not retained in the register file.
                return fpr;
            }
            if (isNumberConstant(edge.node())) {
                FPRReg fpr = fprAllocate();
                m_jit.move(MacroAssembler::Imm64(reinterpretDoubleToInt64(valueOfNumberConstant(edge.node()))), gpr);
                m_jit.move64ToDouble(gpr, fpr);
                unlock(gpr);

                m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
                info.fillDouble(*m_stream, fpr);
                return fpr;
            }
            // A non-numeric constant can never satisfy the number speculation.
            terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
            return fprAllocate();
        }

        DataFormat spillFormat = info.spillFormat();
        switch (spillFormat) {
        case DataFormatDouble: {
            FPRReg fpr = fprAllocate();
            m_jit.loadDouble(JITCompiler::addressFor(virtualRegister), fpr);
            m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
            info.fillDouble(*m_stream, fpr);
            return fpr;
        }

        case DataFormatInteger: {
            GPRReg gpr = allocate();

            m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
            m_jit.load32(JITCompiler::addressFor(virtualRegister), gpr);
            info.fillInteger(*m_stream, gpr);
            unlock(gpr);
            break;
        }

        default:
            // Spilled as a boxed JSValue: reload and let the switch below convert.
            GPRReg gpr = allocate();

            RELEASE_ASSERT(spillFormat & DataFormatJS);
            m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
            m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);
            info.fillJSValue(*m_stream, gpr, spillFormat);
            unlock(gpr);
            break;
        }
    }

    switch (info.registerFormat()) {
    case DataFormatNone: // Should have filled, above.
    case DataFormatBoolean: // This type never occurs.
    case DataFormatStorage:
        RELEASE_ASSERT_NOT_REACHED();

    case DataFormatCell:
        terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
        return fprAllocate();

    case DataFormatJSCell:
    case DataFormatJS:
    case DataFormatJSBoolean: {
        GPRReg jsValueGpr = info.gpr();
        m_gprs.lock(jsValueGpr);
        FPRReg fpr = fprAllocate();
        GPRReg tempGpr = allocate();

        JITCompiler::Jump isInteger = m_jit.branch64(MacroAssembler::AboveOrEqual, jsValueGpr, GPRInfo::tagTypeNumberRegister);

        if (type & ~SpecNumber)
            speculationCheck(BadType, JSValueRegs(jsValueGpr), edge, m_jit.branchTest64(MacroAssembler::Zero, jsValueGpr, GPRInfo::tagTypeNumberRegister));

        // First, if we get here we have a double encoded as a JSValue
        m_jit.move(jsValueGpr, tempGpr);
        unboxDouble(tempGpr, fpr);
        JITCompiler::Jump hasUnboxedDouble = m_jit.jump();

        // Finally, handle integers.
        isInteger.link(&m_jit);
        m_jit.convertInt32ToDouble(jsValueGpr, fpr);
        hasUnboxedDouble.link(&m_jit);

        // The value now lives in the FPR; release the GPR binding.
        m_gprs.release(jsValueGpr);
        m_gprs.unlock(jsValueGpr);
        m_gprs.unlock(tempGpr);
        m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);
        info.fillDouble(*m_stream, fpr);
        info.killSpilled();
        return fpr;
    }

    case DataFormatJSInteger:
    case DataFormatInteger: {
        FPRReg fpr = fprAllocate();
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        m_jit.convertInt32ToDouble(gpr, fpr);
        m_gprs.unlock(gpr);
        return fpr;
    }

    // Unbox the double
    case DataFormatJSDouble: {
        GPRReg gpr = info.gpr();
        FPRReg fpr = fprAllocate();
        if (m_gprs.isLocked(gpr)) {
            // Make sure we don't trample gpr if it is in use.
            GPRReg temp = allocate();
            m_jit.move(gpr, temp);
            unboxDouble(temp, fpr);
            unlock(temp);
        } else
            unboxDouble(gpr, fpr);

        m_gprs.release(gpr);
        m_fprs.retain(fpr, virtualRegister, SpillOrderDouble);

        info.fillDouble(*m_stream, fpr);
        return fpr;
    }

    case DataFormatDouble: {
        FPRReg fpr = info.fpr();
        m_fprs.lock(fpr);
        return fpr;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return InvalidFPRReg;
    }
}

// Fills |edge| into a GPR speculating that it holds a cell pointer.
GPRReg SpeculativeJIT::fillSpeculateCell(Edge edge)
{
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("SpecCell@%d ", edge->index());
#endif
    AbstractValue& value = m_state.forNode(edge);
    SpeculatedType type = value.m_type;
    ASSERT((edge.useKind() != KnownCellUse && edge.useKind() != KnownStringUse) || !(value.m_type & ~SpecCell));
    value.filter(SpecCell);
    VirtualRegister virtualRegister = edge->virtualRegister();
    GenerationInfo& info = m_generationInfo[virtualRegister];

    switch (info.registerFormat()) {
    case DataFormatNone: {
        // A value spilled as a raw int or double can never be a cell.
        if (info.spillFormat() == DataFormatInteger || info.spillFormat() == DataFormatDouble) {
            terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
            return allocate();
        }

        GPRReg gpr = allocate();

        if (edge->hasConstant()) {
            JSValue jsValue = valueOfJSConstant(edge.node());
            if (jsValue.isCell()) {
                m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
                m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
                info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
                return gpr;
            }
            terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
            return gpr;
        }
        RELEASE_ASSERT(info.spillFormat() & DataFormatJS);
        m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
        m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);

        info.fillJSValue(*m_stream, gpr,
            DataFormatJS);
        // A cell has no tag bits set; any set tag bit fails the speculation.
        if (type & ~SpecCell)
            speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
        info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
        return gpr;
    }

    case DataFormatCell:
    case DataFormatJSCell: {
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        return gpr;
    }

    case DataFormatJS: {
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        if (type & ~SpecCell)
            speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, GPRInfo::tagMaskRegister));
        info.fillJSValue(*m_stream, gpr, DataFormatJSCell);
        return gpr;
    }

    case DataFormatJSInteger:
    case DataFormatInteger:
    case DataFormatJSDouble:
    case DataFormatDouble:
    case DataFormatJSBoolean:
    case DataFormatBoolean: {
        // Known non-cell formats can never pass the cell speculation.
        terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
        return allocate();
    }

    case DataFormatStorage:
        RELEASE_ASSERT_NOT_REACHED();

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return InvalidGPRReg;
    }
}

// Fills |edge| into a GPR speculating that it holds a boolean.
GPRReg SpeculativeJIT::fillSpeculateBoolean(Edge edge)
{
#if DFG_ENABLE(DEBUG_VERBOSE)
    dataLogF("SpecBool@%d ", edge->index());
#endif
    AbstractValue& value = m_state.forNode(edge);
    SpeculatedType type = value.m_type;
    value.filter(SpecBoolean);
    VirtualRegister virtualRegister = edge->virtualRegister();
    GenerationInfo& info = m_generationInfo[virtualRegister];

    switch (info.registerFormat()) {
    case DataFormatNone: {
        if (info.spillFormat() == DataFormatInteger || info.spillFormat() == DataFormatDouble) {
            terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
            return allocate();
        }

        GPRReg gpr = allocate();

        if (edge->hasConstant()) {
            JSValue jsValue = valueOfJSConstant(edge.node());
            if (jsValue.isBoolean()) {
                m_gprs.retain(gpr, virtualRegister, SpillOrderConstant);
                m_jit.move(MacroAssembler::TrustedImm64(JSValue::encode(jsValue)), gpr);
                info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
                return gpr;
            }
            terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
            return gpr;
        }
        RELEASE_ASSERT(info.spillFormat() & DataFormatJS);
        m_gprs.retain(gpr, virtualRegister, SpillOrderSpilled);
        m_jit.load64(JITCompiler::addressFor(virtualRegister), gpr);

        info.fillJSValue(*m_stream, gpr, DataFormatJS);
        if (type & ~SpecBoolean) {
            // XOR with ValueFalse maps false->0 and true->1; anything else has
            // bits outside the low bit set. The second XOR restores the value
            // (the recovery does the same on the speculation-fail path).
            m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
            speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
            m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
        }
        info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
        return gpr;
    }

    case DataFormatBoolean:
    case DataFormatJSBoolean: {
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        return gpr;
    }

    case DataFormatJS: {
        GPRReg gpr = info.gpr();
        m_gprs.lock(gpr);
        if (type & ~SpecBoolean) {
            m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
            speculationCheck(BadType, JSValueRegs(gpr), edge, m_jit.branchTest64(MacroAssembler::NonZero, gpr, TrustedImm32(static_cast<int32_t>(~1))), SpeculationRecovery(BooleanSpeculationCheck, gpr, InvalidGPRReg));
            m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), gpr);
        }
        info.fillJSValue(*m_stream, gpr, DataFormatJSBoolean);
        return gpr;
    }

    case DataFormatJSInteger:
    case DataFormatInteger:
    case DataFormatJSDouble:
    case DataFormatDouble:
    case DataFormatJSCell:
    case DataFormatCell: {
        terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
        return allocate();
    }

    case DataFormatStorage:
        RELEASE_ASSERT_NOT_REACHED();

    default:
        RELEASE_ASSERT_NOT_REACHED();
        return InvalidGPRReg;
    }
}

// Converts a boxed JSValue number in |value| to a double in |result|,
// clobbering |tmp|. Returns the jump taken when the value is not a number.
JITCompiler::Jump SpeculativeJIT::convertToDouble(GPRReg value, FPRReg result, GPRReg tmp)
{
    JITCompiler::Jump isInteger = m_jit.branch64(MacroAssembler::AboveOrEqual, value, GPRInfo::tagTypeNumberRegister);

    JITCompiler::Jump notNumber = m_jit.branchTest64(MacroAssembler::Zero, value, GPRInfo::tagTypeNumberRegister);

    m_jit.move(value, tmp);
    unboxDouble(tmp, result);

    JITCompiler::Jump done = m_jit.jump();

    isInteger.link(&m_jit);

    m_jit.convertInt32ToDouble(value, result);

    done.link(&m_jit);

    return notNumber;
}

// Compares two speculated objects for equality, producing a boolean JSValue.
// Both children are type-checked to be objects (not strings), and the
// MasqueradesAsUndefined case is excluded either via a watchpoint or an
// explicit type-info check.
void SpeculativeJIT::compileObjectEquality(Node* node)
{
    SpeculateCellOperand op1(this, node->child1());
    SpeculateCellOperand op2(this, node->child2());
    GPRTemporary result(this, op1);

    GPRReg op1GPR = op1.gpr();
    GPRReg op2GPR = op2.gpr();
    GPRReg resultGPR = result.gpr();

    if (m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) {
        m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op1GPR), node->child1(), SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op2GPR), node->child2(), SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
    } else {
        GPRTemporary structure(this);
        GPRReg structureGPR = structure.gpr();

        m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op1GPR), node->child1(), SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), node->child1(),
            m_jit.branchTest8(
                MacroAssembler::NonZero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));

        m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op2GPR), node->child2(), SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        speculationCheck(BadType, JSValueSource::unboxedCell(op2GPR), node->child2(),
            m_jit.branchTest8(
                MacroAssembler::NonZero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
    }

    // Objects compare equal iff they are the same pointer.
    MacroAssembler::Jump falseCase = m_jit.branch64(MacroAssembler::NotEqual, op1GPR, op2GPR);
    m_jit.move(TrustedImm32(ValueTrue), resultGPR);
    MacroAssembler::Jump done = m_jit.jump();
    falseCase.link(&m_jit);
    m_jit.move(TrustedImm32(ValueFalse), resultGPR);
    done.link(&m_jit);

    jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
}

// Compares an object (leftChild) against a value speculated to be either an
// object or null/undefined (rightChild), producing a boolean JSValue.
void SpeculativeJIT::compileObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild)
{
    SpeculateCellOperand op1(this, leftChild);
    JSValueOperand op2(this, rightChild, ManualOperandSpeculation);
    GPRTemporary result(this);

    GPRReg op1GPR = op1.gpr();
    GPRReg op2GPR = op2.gpr();
    GPRReg resultGPR = result.gpr();
    GPRTemporary structure;
    GPRReg structureGPR = InvalidGPRReg;

    bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();

    if (!masqueradesAsUndefinedWatchpointValid) {
        // The masquerades as undefined case will use the structure register, so allocate it here.
        // Do this at the top of the function to avoid branching around a register allocation.
        GPRTemporary realStructure(this);
        structure.adopt(realStructure);
        structureGPR = structure.gpr();
    }

    if (masqueradesAsUndefinedWatchpointValid) {
        m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
    } else {
        m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild,
            m_jit.branchTest8(
                MacroAssembler::NonZero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
    }

    // It seems that most of the time when programs do a == b where b may be either null/undefined
    // or an object, b is usually an object. Balance the branches to make that case fast.
    MacroAssembler::Jump rightNotCell =
        m_jit.branchTest64(MacroAssembler::NonZero, op2GPR, GPRInfo::tagMaskRegister);

    // We know that within this branch, rightChild must be a cell.
    if (masqueradesAsUndefinedWatchpointValid) {
        m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        DFG_TYPE_CHECK(
            JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
    } else {
        m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
        DFG_TYPE_CHECK(
            JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        speculationCheck(BadType, JSValueRegs(op2GPR), rightChild,
            m_jit.branchTest8(
                MacroAssembler::NonZero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
    }

    // At this point we know that we can perform a straight-forward equality comparison on pointer
    // values because both left and right are pointers to objects that have no special equality
    // protocols.
    MacroAssembler::Jump falseCase = m_jit.branch64(MacroAssembler::NotEqual, op1GPR, op2GPR);
    MacroAssembler::Jump trueCase = m_jit.jump();

    rightNotCell.link(&m_jit);

    // We know that within this branch, rightChild must not be a cell. Check if that is enough to
    // prove that it is either null or undefined.
    if (needsTypeCheck(rightChild, SpecCell | SpecOther)) {
        m_jit.move(op2GPR, resultGPR);
        m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);

        typeCheck(
            JSValueRegs(op2GPR), rightChild, SpecCell | SpecOther,
            m_jit.branch64(
                MacroAssembler::NotEqual, resultGPR,
                MacroAssembler::TrustedImm64(ValueNull)));
    }

    falseCase.link(&m_jit);
    m_jit.move(TrustedImm32(ValueFalse), resultGPR);
    MacroAssembler::Jump done = m_jit.jump();
    trueCase.link(&m_jit);
    m_jit.move(TrustedImm32(ValueTrue), resultGPR);
    done.link(&m_jit);

    jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
}

// Peephole variant of the above: instead of materializing a boolean result,
// branches directly to the taken/notTaken blocks of |branchNode|.
void SpeculativeJIT::compilePeepHoleObjectToObjectOrOtherEquality(Edge leftChild, Edge rightChild, Node* branchNode)
{
    BlockIndex taken = branchNode->takenBlockIndex();
    BlockIndex notTaken = branchNode->notTakenBlockIndex();

    SpeculateCellOperand op1(this, leftChild);
    JSValueOperand op2(this, rightChild, ManualOperandSpeculation);
    GPRTemporary result(this);

    GPRReg op1GPR = op1.gpr();
    GPRReg op2GPR = op2.gpr();
    GPRReg resultGPR = result.gpr();
    GPRTemporary structure;
    GPRReg structureGPR = InvalidGPRReg;

    bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();

    if (!masqueradesAsUndefinedWatchpointValid) {
        // The masquerades as undefined case will use the structure register, so allocate it here.
        // Do this at the top of the function to avoid branching around a register allocation.
        GPRTemporary realStructure(this);
        structure.adopt(realStructure);
        structureGPR = structure.gpr();
    }

    if (masqueradesAsUndefinedWatchpointValid) {
        m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(op1GPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
    } else {
        m_jit.loadPtr(MacroAssembler::Address(op1GPR, JSCell::structureOffset()), structureGPR);
        DFG_TYPE_CHECK(
            JSValueSource::unboxedCell(op1GPR), leftChild, SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        speculationCheck(BadType, JSValueSource::unboxedCell(op1GPR), leftChild,
            m_jit.branchTest8(
                MacroAssembler::NonZero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
    }

    // It seems that most of the time when programs do a == b where b may be either null/undefined
    // or an object, b is usually an object. Balance the branches to make that case fast.
    MacroAssembler::Jump rightNotCell =
        m_jit.branchTest64(MacroAssembler::NonZero, op2GPR, GPRInfo::tagMaskRegister);

    // We know that within this branch, rightChild must be a cell.
    if (masqueradesAsUndefinedWatchpointValid) {
        m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        DFG_TYPE_CHECK(
            JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(op2GPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
    } else {
        m_jit.loadPtr(MacroAssembler::Address(op2GPR, JSCell::structureOffset()), structureGPR);
        DFG_TYPE_CHECK(
            JSValueRegs(op2GPR), rightChild, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
        speculationCheck(BadType, JSValueRegs(op2GPR), rightChild,
            m_jit.branchTest8(
                MacroAssembler::NonZero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined)));
    }

    // At this point we know that we can perform a straight-forward equality comparison on pointer
    // values because both left and right are pointers to objects that have no special equality
    // protocols.
    branch64(MacroAssembler::Equal, op1GPR, op2GPR, taken);

    // We know that within this branch, rightChild must not be a cell. Check if that is enough to
    // prove that it is either null or undefined.
    if (!needsTypeCheck(rightChild, SpecCell | SpecOther))
        rightNotCell.link(&m_jit);
    else {
        jump(notTaken, ForceJump);

        rightNotCell.link(&m_jit);
        m_jit.move(op2GPR, resultGPR);
        m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);

        typeCheck(
            JSValueRegs(op2GPR), rightChild, SpecCell | SpecOther, m_jit.branch64(
                MacroAssembler::NotEqual, resultGPR,
                MacroAssembler::TrustedImm64(ValueNull)));
    }

    jump(notTaken);
}

// Compares two speculated int32s and produces a boolean JSValue result.
void SpeculativeJIT::compileIntegerCompare(Node* node, MacroAssembler::RelationalCondition condition)
{
    SpeculateIntegerOperand op1(this, node->child1());
    SpeculateIntegerOperand op2(this, node->child2());
    GPRTemporary result(this, op1, op2);

    m_jit.compare32(condition, op1.gpr(), op2.gpr(), result.gpr());

    // If we add a DataFormatBool, we should use it here.
    m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
    jsValueResult(result.gpr(), m_currentNode, DataFormatJSBoolean);
}

// Compares two speculated doubles and produces a boolean JSValue result.
void SpeculativeJIT::compileDoubleCompare(Node* node, MacroAssembler::DoubleCondition condition)
{
    SpeculateDoubleOperand op1(this, node->child1());
    SpeculateDoubleOperand op2(this, node->child2());
    GPRTemporary result(this);

    // Start with true; the xor flips it to false when the branch is not taken.
    m_jit.move(TrustedImm32(ValueTrue), result.gpr());
    MacroAssembler::Jump trueCase = m_jit.branchDouble(condition, op1.fpr(), op2.fpr());
    m_jit.xor64(TrustedImm32(true), result.gpr());
    trueCase.link(&m_jit);

    jsValueResult(result.gpr(), node, DataFormatJSBoolean);
}

// Implements the fully generic JS + operator by calling out to a C function.
void SpeculativeJIT::compileValueAdd(Node* node)
{
    JSValueOperand op1(this, node->child1());
    JSValueOperand op2(this, node->child2());

    GPRReg op1GPR = op1.gpr();
    GPRReg op2GPR = op2.gpr();

    flushRegisters();

    GPRResult result(this);
    if (isKnownNotNumber(node->child1().node()) || isKnownNotNumber(node->child2().node()))
        callOperation(operationValueAddNotNumber, result.gpr(), op1GPR, op2GPR);
    else
        callOperation(operationValueAdd, result.gpr(), op1GPR, op2GPR);

    jsValueResult(result.gpr(), node);
}

// Computes !value for a value speculated to be either an object or
// null/undefined, producing a boolean JSValue.
void SpeculativeJIT::compileObjectOrOtherLogicalNot(Edge nodeUse)
{
    JSValueOperand value(this, nodeUse, ManualOperandSpeculation);
    GPRTemporary result(this);
    GPRReg valueGPR = value.gpr();
    GPRReg resultGPR = result.gpr();
    GPRTemporary structure;
    GPRReg structureGPR = InvalidGPRReg;

    bool masqueradesAsUndefinedWatchpointValid = m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid();

    if (!masqueradesAsUndefinedWatchpointValid) {
        // The masquerades as undefined case will use the structure register, so allocate it here.
        // Do this at the top of the function to avoid branching around a register allocation.
        GPRTemporary realStructure(this);
        structure.adopt(realStructure);
        structureGPR = structure.gpr();
    }

    MacroAssembler::Jump notCell = m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagMaskRegister);
    if (masqueradesAsUndefinedWatchpointValid) {
        m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint());
        DFG_TYPE_CHECK(
            JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(valueGPR, JSCell::structureOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));
    } else {
        m_jit.loadPtr(MacroAssembler::Address(valueGPR, JSCell::structureOffset()), structureGPR);

        DFG_TYPE_CHECK(
            JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                structureGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));

        // A MasqueradesAsUndefined object from this global object fails the
        // speculation; all other objects take the fast path below.
        MacroAssembler::Jump isNotMasqueradesAsUndefined =
            m_jit.branchTest8(
                MacroAssembler::Zero,
                MacroAssembler::Address(structureGPR, Structure::typeInfoFlagsOffset()),
                MacroAssembler::TrustedImm32(MasqueradesAsUndefined));

        speculationCheck(BadType, JSValueRegs(valueGPR), nodeUse,
            m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(structureGPR, Structure::globalObjectOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.graph().globalObjectFor(m_currentNode->codeOrigin))));

        isNotMasqueradesAsUndefined.link(&m_jit);
    }
    // An object is truthy, so !object is false.
    m_jit.move(TrustedImm32(ValueFalse), resultGPR);
    MacroAssembler::Jump done = m_jit.jump();

    notCell.link(&m_jit);

    if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) {
        // Masking off TagBitUndefined maps both null and undefined to ValueNull.
        m_jit.move(valueGPR, resultGPR);
        m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), resultGPR);
        typeCheck(
            JSValueRegs(valueGPR), nodeUse, SpecCell | SpecOther, m_jit.branch64(
                MacroAssembler::NotEqual,
                resultGPR,
                MacroAssembler::TrustedImm64(ValueNull)));
    }
    // null/undefined are falsy, so the logical not is true.
    m_jit.move(TrustedImm32(ValueTrue), resultGPR);

    done.link(&m_jit);

    jsValueResult(resultGPR, m_currentNode, DataFormatJSBoolean);
}

// Emits code for LogicalNot, dispatching on the speculated type of the child.
void SpeculativeJIT::compileLogicalNot(Node* node)
{
    switch (node->child1().useKind()) {
    case ObjectOrOtherUse: {
        compileObjectOrOtherLogicalNot(node->child1());
        return;
    }

    case Int32Use: {
        SpeculateIntegerOperand value(this, node->child1());
        GPRTemporary result(this, value);
        m_jit.compare32(MacroAssembler::Equal, value.gpr(), MacroAssembler::TrustedImm32(0), result.gpr());
        m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
        jsValueResult(result.gpr(), node, DataFormatJSBoolean);
        return;
    }

    case NumberUse: {
        SpeculateDoubleOperand value(this, node->child1());
        FPRTemporary scratch(this);
        GPRTemporary result(this);
        m_jit.move(TrustedImm32(ValueFalse), result.gpr());
        MacroAssembler::Jump nonZero = m_jit.branchDoubleNonZero(value.fpr(), scratch.fpr());
        m_jit.xor32(TrustedImm32(true), result.gpr());
        nonZero.link(&m_jit);
        jsValueResult(result.gpr(), node, DataFormatJSBoolean);
        return;
    }

    case BooleanUse: {
        if (!needsTypeCheck(node->child1(), SpecBoolean)) {
            // Proven boolean: flipping the low bit toggles true <-> false.
            SpeculateBooleanOperand value(this, node->child1());
            GPRTemporary result(this, value);

            m_jit.move(value.gpr(), result.gpr());
            m_jit.xor64(TrustedImm32(true), result.gpr());

            jsValueResult(result.gpr(), node, DataFormatJSBoolean);
            return;
        }

        JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
        GPRTemporary result(this); // FIXME: We could reuse, but on speculation fail would need recovery to restore tag (akin to add).

        m_jit.move(value.gpr(), result.gpr());
        m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), result.gpr());
        typeCheck(
            JSValueRegs(value.gpr()), node->child1(), SpecBoolean, m_jit.branchTest64(
                JITCompiler::NonZero, result.gpr(), TrustedImm32(static_cast<int32_t>(~1))));
        m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), result.gpr());

        // If we add a DataFormatBool, we should use it here.
1729 jsValueResult(result.gpr(), node, DataFormatJSBoolean); 1730 return; 1731 } 1732 1733 case UntypedUse: { 1734 JSValueOperand arg1(this, node->child1()); 1735 GPRTemporary result(this); 1736 1737 GPRReg arg1GPR = arg1.gpr(); 1738 GPRReg resultGPR = result.gpr(); 1739 1740 arg1.use(); 1741 1742 m_jit.move(arg1GPR, resultGPR); 1743 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueFalse)), resultGPR); 1744 JITCompiler::Jump slowCase = m_jit.branchTest64(JITCompiler::NonZero, resultGPR, TrustedImm32(static_cast<int32_t>(~1))); 1745 1746 addSlowPathGenerator( 1747 slowPathCall(slowCase, this, dfgConvertJSValueToBoolean, resultGPR, arg1GPR)); 1748 1749 m_jit.xor64(TrustedImm32(static_cast<int32_t>(ValueTrue)), resultGPR); 1750 jsValueResult(resultGPR, node, DataFormatJSBoolean, UseChildrenCalledExplicitly); 1751 return; 1752 } 1753 1754 default: 1755 RELEASE_ASSERT_NOT_REACHED(); 1756 break; 1757 } 1758} 1759 1760void SpeculativeJIT::emitObjectOrOtherBranch(Edge nodeUse, BlockIndex taken, BlockIndex notTaken) 1761{ 1762 JSValueOperand value(this, nodeUse, ManualOperandSpeculation); 1763 GPRTemporary scratch(this); 1764 GPRReg valueGPR = value.gpr(); 1765 GPRReg scratchGPR = scratch.gpr(); 1766 1767 MacroAssembler::Jump notCell = m_jit.branchTest64(MacroAssembler::NonZero, valueGPR, GPRInfo::tagMaskRegister); 1768 if (m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) { 1769 m_jit.graph().globalObjectFor(m_currentNode->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint()); 1770 1771 DFG_TYPE_CHECK( 1772 JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr( 1773 MacroAssembler::Equal, 1774 MacroAssembler::Address(valueGPR, JSCell::structureOffset()), 1775 MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get()))); 1776 } else { 1777 m_jit.loadPtr(MacroAssembler::Address(valueGPR, JSCell::structureOffset()), scratchGPR); 1778 1779 DFG_TYPE_CHECK( 1780 
            JSValueRegs(valueGPR), nodeUse, (~SpecCell) | SpecObject, m_jit.branchPtr(
                MacroAssembler::Equal,
                scratchGPR,
                MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get())));

        // Cells without the MasqueradesAsUndefined flag cannot act as
        // undefined, so they skip the global-object check.
        JITCompiler::Jump isNotMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::Zero, MacroAssembler::Address(scratchGPR, Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined));

        // A masquerading cell from the current global object would be falsy;
        // speculate that we never see one here.
        speculationCheck(BadType, JSValueRegs(valueGPR), nodeUse,
            m_jit.branchPtr(
                MacroAssembler::Equal,
                MacroAssembler::Address(scratchGPR, Structure::globalObjectOffset()),
                MacroAssembler::TrustedImmPtr(m_jit.graph().globalObjectFor(m_currentNode->codeOrigin))));

        isNotMasqueradesAsUndefined.link(&m_jit);
    }
    // Object path: truthy, so branch to 'taken'.
    jump(taken, ForceJump);

    notCell.link(&m_jit);

    // Non-cell path: speculate undefined/null (folded onto ValueNull by
    // masking off TagBitUndefined), which is falsy.
    if (needsTypeCheck(nodeUse, SpecCell | SpecOther)) {
        m_jit.move(valueGPR, scratchGPR);
        m_jit.and64(MacroAssembler::TrustedImm32(~TagBitUndefined), scratchGPR);
        typeCheck(
            JSValueRegs(valueGPR), nodeUse, SpecCell | SpecOther, m_jit.branch64(
                MacroAssembler::NotEqual, scratchGPR, MacroAssembler::TrustedImm64(ValueNull)));
    }
    jump(notTaken);

    noResult(m_currentNode);
}

// Emits code for a Branch node, dispatching on the operand's use kind.
// 'taken' is the destination when the condition is truthy, 'notTaken'
// when it is falsy.
void SpeculativeJIT::emitBranch(Node* node)
{
    BlockIndex taken = node->takenBlockIndex();
    BlockIndex notTaken = node->notTakenBlockIndex();

    switch (node->child1().useKind()) {
    case ObjectOrOtherUse: {
        emitObjectOrOtherBranch(node->child1(), taken, notTaken);
        return;
    }

    case Int32Use:
    case NumberUse: {
        if (node->child1().useKind() == Int32Use) {
            bool invert = false;

            // If the taken block is the fall-through block, invert the test
            // so the emitted branch targets the other block.
            if (taken == nextBlock()) {
                invert = true;
                BlockIndex tmp = taken;
                taken = notTaken;
                notTaken = tmp;
            }

            SpeculateIntegerOperand value(this, node->child1());
            branchTest32(invert ? MacroAssembler::Zero : MacroAssembler::NonZero, value.gpr(), taken);
        } else {
            // Doubles: non-zero (and non-NaN, per branchDoubleNonZero) is truthy.
            SpeculateDoubleOperand value(this, node->child1());
            FPRTemporary scratch(this);
            branchDoubleNonZero(value.fpr(), scratch.fpr(), taken);
        }

        jump(notTaken);

        noResult(node);
        return;
    }

    case UntypedUse:
    case BooleanUse: {
        JSValueOperand value(this, node->child1(), ManualOperandSpeculation);
        GPRReg valueGPR = value.gpr();

        if (node->child1().useKind() == BooleanUse) {
            if (!needsTypeCheck(node->child1(), SpecBoolean)) {
                // Proven boolean: test the low bit directly.
                MacroAssembler::ResultCondition condition = MacroAssembler::NonZero;

                if (taken == nextBlock()) {
                    condition = MacroAssembler::Zero;
                    BlockIndex tmp = taken;
                    taken = notTaken;
                    notTaken = tmp;
                }

                branchTest32(condition, valueGPR, TrustedImm32(true), taken);
                jump(notTaken);
            } else {
                // Compare against the two boxed boolean encodings; anything
                // else fails the speculation.
                branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(false))), notTaken);
                branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(true))), taken);

                typeCheck(JSValueRegs(valueGPR), node->child1(), SpecBoolean, m_jit.jump());
            }
            value.use();
        } else {
            GPRTemporary result(this);
            GPRReg resultGPR = result.gpr();

            // Fast paths keyed off the prediction; anything that does not
            // match falls through to the dfgConvertJSValueToBoolean call.
            if (node->child1()->prediction() & SpecInt32) {
                branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsNumber(0))), notTaken);
                branch64(MacroAssembler::AboveOrEqual, valueGPR, GPRInfo::tagTypeNumberRegister, taken);
            }

            if (node->child1()->prediction() & SpecBoolean) {
                branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(false))), notTaken);
                branch64(MacroAssembler::Equal, valueGPR, MacroAssembler::TrustedImm64(JSValue::encode(jsBoolean(true))), taken);
            }

            value.use();

            silentSpillAllRegisters(resultGPR);
            callOperation(dfgConvertJSValueToBoolean, resultGPR, valueGPR);
            silentFillAllRegisters(resultGPR);

            branchTest32(MacroAssembler::NonZero, resultGPR, taken);
            jump(notTaken);
        }

        noResult(node, UseChildrenCalledExplicitly);
        return;
    }

    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

// Main code generator: emits machine code for a single DFG node.
void SpeculativeJIT::compile(Node* node)
{
    NodeType op = node->op();

#if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION)
    m_jit.clearRegisterAllocationOffsets();
#endif

    switch (op) {
    case JSConstant:
        initConstantInfo(node);
        break;

    case PhantomArguments:
        initConstantInfo(node);
        break;

    case WeakJSConstant:
        m_jit.addWeakReference(node->weakConstant());
        initConstantInfo(node);
        break;

    case Identity: {
        // CSE should always eliminate this.
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }

    case GetLocal: {
        SpeculatedType prediction = node->variableAccessData()->prediction();
        AbstractValue& value = m_state.variables().operand(node->local());

        // If we have no prediction for this local, then don't attempt to compile.
        if (prediction == SpecNone) {
            terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
            break;
        }

        // If the CFA is tracking this variable and it found that the variable
        // cannot have been assigned, then don't attempt to proceed.
        if (value.isClear()) {
            // FIXME: We should trap instead.
            // https://bugs.webkit.org/show_bug.cgi?id=110383
            terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
            break;
        }

        // Unboxed double local: load the raw double from the stack slot.
        if (node->variableAccessData()->shouldUseDoubleFormat()) {
            FPRTemporary result(this);
            m_jit.loadDouble(JITCompiler::addressFor(node->local()), result.fpr());
            VirtualRegister virtualRegister = node->virtualRegister();
            m_fprs.retain(result.fpr(), virtualRegister, SpillOrderDouble);
            m_generationInfo[virtualRegister].initDouble(node, node->refCount(), result.fpr());
            break;
        }

        if (isInt32Speculation(value.m_type)) {
            GPRTemporary result(this);
            m_jit.load32(JITCompiler::payloadFor(node->local()), result.gpr());

            // Like integerResult, but don't useChildren - our children are phi nodes,
            // and don't represent values within this dataflow with virtual registers.
            VirtualRegister virtualRegister = node->virtualRegister();
            m_gprs.retain(result.gpr(), virtualRegister, SpillOrderInteger);
            m_generationInfo[virtualRegister].initInteger(node, node->refCount(), result.gpr());
            break;
        }

        GPRTemporary result(this);
        m_jit.load64(JITCompiler::addressFor(node->local()), result.gpr());

        // Like jsValueResult, but don't useChildren - our children are phi nodes,
        // and don't represent values within this dataflow with virtual registers.
        VirtualRegister virtualRegister = node->virtualRegister();
        m_gprs.retain(result.gpr(), virtualRegister, SpillOrderJS);

        // Tighten the data format when the CFA has proven a cell or boolean.
        DataFormat format;
        if (isCellSpeculation(value.m_type))
            format = DataFormatJSCell;
        else if (isBooleanSpeculation(value.m_type))
            format = DataFormatJSBoolean;
        else
            format = DataFormatJS;

        m_generationInfo[virtualRegister].initJSValue(node, node->refCount(), result.gpr(), format);
        break;
    }

    case GetLocalUnlinked: {
        GPRTemporary result(this);

        m_jit.load64(JITCompiler::addressFor(node->unlinkedLocal()), result.gpr());

        jsValueResult(result.gpr(), node);
        break;
    }

    case MovHintAndCheck: {
        compileMovHintAndCheck(node);
        break;
    }

    case InlineStart: {
        compileInlineStart(node);
        break;
    }

    case MovHint:
    case ZombieHint: {
        RELEASE_ASSERT_NOT_REACHED();
        break;
    }

    case SetLocal: {
        // SetLocal doubles as a hint as to where a node will be stored and
        // as a speculation point. So before we speculate make sure that we
        // know where the child of this node needs to go in the virtual
        // stack.
        compileMovHint(node);

        if (node->variableAccessData()->shouldUnboxIfPossible()) {
            if (node->variableAccessData()->shouldUseDoubleFormat()) {
                SpeculateDoubleOperand value(this, node->child1());
                m_jit.storeDouble(value.fpr(), JITCompiler::addressFor(node->local()));
                noResult(node);
                // Indicate that it's no longer necessary to retrieve the value of
                // this bytecode variable from registers or other locations in the stack,
                // but that it is stored as a double.
                recordSetLocal(node->local(), ValueSource(DoubleInJSStack));
                break;
            }

            SpeculatedType predictedType = node->variableAccessData()->argumentAwarePrediction();
            if (isInt32Speculation(predictedType)) {
                SpeculateIntegerOperand value(this, node->child1());
                // Only the 32-bit payload is stored for unboxed int32 locals.
                m_jit.store32(value.gpr(), JITCompiler::payloadFor(node->local()));
                noResult(node);
                recordSetLocal(node->local(), ValueSource(Int32InJSStack));
                break;
            }
            if (isCellSpeculation(predictedType)) {
                SpeculateCellOperand cell(this, node->child1());
                GPRReg cellGPR = cell.gpr();
                m_jit.store64(cellGPR, JITCompiler::addressFor(node->local()));
                noResult(node);
                recordSetLocal(node->local(), ValueSource(CellInJSStack));
                break;
            }
            if (isBooleanSpeculation(predictedType)) {
                SpeculateBooleanOperand boolean(this, node->child1());
                m_jit.store64(boolean.gpr(), JITCompiler::addressFor(node->local()));
                noResult(node);
                recordSetLocal(node->local(), ValueSource(BooleanInJSStack));
                break;
            }
        }

        // Fallback: store the full boxed JSValue.
        JSValueOperand value(this, node->child1());
        m_jit.store64(value.gpr(), JITCompiler::addressFor(node->local()));
        noResult(node);

        recordSetLocal(node->local(), ValueSource(ValueInJSStack));

        // If we're storing an arguments object that has been optimized away,
        // our variable event stream for OSR exit now reflects the optimized
        // value (JSValue()). On the slow path, we want an arguments object
        // instead. We add an additional move hint to show OSR exit that it
        // needs to reconstruct the arguments object.
        if (node->child1()->op() == PhantomArguments)
            compileMovHint(node);

        break;
    }

    case SetArgument:
        // This is a no-op; it just marks the fact that the argument is being used.
        // But it may be profitable to use this as a hook to run speculation checks
        // on arguments, thereby allowing us to trivially eliminate such checks if
        // the argument is not used.
        break;

    case BitAnd:
    case BitOr:
    case BitXor:
        // Constant-fold one side into an immediate when possible (these ops
        // are commutative, so either child may be the constant).
        if (isInt32Constant(node->child1().node())) {
            SpeculateIntegerOperand op2(this, node->child2());
            GPRTemporary result(this, op2);

            bitOp(op, valueOfInt32Constant(node->child1().node()), op2.gpr(), result.gpr());

            integerResult(result.gpr(), node);
        } else if (isInt32Constant(node->child2().node())) {
            SpeculateIntegerOperand op1(this, node->child1());
            GPRTemporary result(this, op1);

            bitOp(op, valueOfInt32Constant(node->child2().node()), op1.gpr(), result.gpr());

            integerResult(result.gpr(), node);
        } else {
            SpeculateIntegerOperand op1(this, node->child1());
            SpeculateIntegerOperand op2(this, node->child2());
            GPRTemporary result(this, op1, op2);

            GPRReg reg1 = op1.gpr();
            GPRReg reg2 = op2.gpr();
            bitOp(op, reg1, reg2, result.gpr());

            integerResult(result.gpr(), node);
        }
        break;

    case BitRShift:
    case BitLShift:
    case BitURShift:
        if (isInt32Constant(node->child2().node())) {
            SpeculateIntegerOperand op1(this, node->child1());
            GPRTemporary result(this, op1);

            // Shift amounts are taken mod 32, per JS semantics.
            shiftOp(op, op1.gpr(), valueOfInt32Constant(node->child2().node()) & 0x1f, result.gpr());

            integerResult(result.gpr(), node);
        } else {
            // Do not allow shift amount to be used as the result, MacroAssembler does not permit this.
            SpeculateIntegerOperand op1(this, node->child1());
            SpeculateIntegerOperand op2(this, node->child2());
            GPRTemporary result(this, op1);

            GPRReg reg1 = op1.gpr();
            GPRReg reg2 = op2.gpr();
            shiftOp(op, reg1, reg2, result.gpr());

            integerResult(result.gpr(), node);
        }
        break;

    case UInt32ToNumber: {
        compileUInt32ToNumber(node);
        break;
    }

    case DoubleAsInt32: {
        compileDoubleAsInt32(node);
        break;
    }

    case ValueToInt32: {
        compileValueToInt32(node);
        break;
    }

    case Int32ToDouble:
    case ForwardInt32ToDouble: {
        compileInt32ToDouble(node);
        break;
    }

    case ValueAdd:
    case ArithAdd:
        compileAdd(node);
        break;

    case MakeRope:
        compileMakeRope(node);
        break;

    case ArithSub:
        compileArithSub(node);
        break;

    case ArithNegate:
        compileArithNegate(node);
        break;

    case ArithMul:
        compileArithMul(node);
        break;

    case ArithIMul:
        compileArithIMul(node);
        break;

    case ArithDiv: {
        switch (node->binaryUseKind()) {
        case Int32Use: {
            compileIntegerArithDivForX86(node);
            break;
        }

        case NumberUse: {
            SpeculateDoubleOperand op1(this, node->child1());
            SpeculateDoubleOperand op2(this, node->child2());
            FPRTemporary result(this, op1);

            FPRReg reg1 = op1.fpr();
            FPRReg reg2 = op2.fpr();
            m_jit.divDouble(reg1, reg2, result.fpr());

            doubleResult(result.fpr(), node);
            break;
        }

        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        break;
    }

    case ArithMod: {
        compileArithMod(node);
        break;
    }

    case ArithAbs: {
        switch (node->child1().useKind()) {
        case Int32Use: {
            SpeculateIntegerOperand op1(this, node->child1());
            GPRTemporary result(this);
            GPRTemporary scratch(this);

            // Branchless abs: scratch = sign mask (all ones if negative),
            // then (x + mask) ^ mask == |x| for all x except INT_MIN.
            m_jit.zeroExtend32ToPtr(op1.gpr(), result.gpr());
            m_jit.rshift32(result.gpr(), MacroAssembler::TrustedImm32(31), scratch.gpr());
            m_jit.add32(scratch.gpr(), result.gpr());
            m_jit.xor32(scratch.gpr(), result.gpr());
            // abs(INT_MIN) overflows int32; bail to the baseline in that case.
            speculationCheck(Overflow, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::Equal, result.gpr(), MacroAssembler::TrustedImm32(1 << 31)));
            integerResult(result.gpr(), node);
            break;
        }

        case NumberUse: {
            SpeculateDoubleOperand op1(this, node->child1());
            FPRTemporary result(this);

            m_jit.absDouble(op1.fpr(), result.fpr());
            doubleResult(result.fpr(), node);
            break;
        }

        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        break;
    }

    case ArithMin:
    case ArithMax: {
        switch (node->binaryUseKind()) {
        case Int32Use: {
            SpeculateStrictInt32Operand op1(this, node->child1());
            SpeculateStrictInt32Operand op2(this, node->child2());
            GPRTemporary result(this, op1);

            MacroAssembler::Jump op1Less = m_jit.branch32(op == ArithMin ? MacroAssembler::LessThan : MacroAssembler::GreaterThan, op1.gpr(), op2.gpr());
            m_jit.move(op2.gpr(), result.gpr());
            if (op1.gpr() != result.gpr()) {
                MacroAssembler::Jump done = m_jit.jump();
                op1Less.link(&m_jit);
                m_jit.move(op1.gpr(), result.gpr());
                done.link(&m_jit);
            } else
                op1Less.link(&m_jit);

            integerResult(result.gpr(), node);
            break;
        }

        case NumberUse: {
            SpeculateDoubleOperand op1(this, node->child1());
            SpeculateDoubleOperand op2(this, node->child2());
            FPRTemporary result(this, op1);

            FPRReg op1FPR = op1.fpr();
            FPRReg op2FPR = op2.fpr();
            FPRReg resultFPR = result.fpr();

            MacroAssembler::JumpList done;

            MacroAssembler::Jump op1Less = m_jit.branchDouble(op == ArithMin ? MacroAssembler::DoubleLessThan : MacroAssembler::DoubleGreaterThan, op1FPR, op2FPR);

            // op2 is either the lesser one or one of them is NaN
            MacroAssembler::Jump op2Less = m_jit.branchDouble(op == ArithMin ? MacroAssembler::DoubleGreaterThanOrEqual : MacroAssembler::DoubleLessThanOrEqual, op1FPR, op2FPR);

            // Unordered case. We don't know which of op1, op2 is NaN. Manufacture NaN by adding
            // op1 + op2 and putting it into result.
            m_jit.addDouble(op1FPR, op2FPR, resultFPR);
            done.append(m_jit.jump());

            op2Less.link(&m_jit);
            m_jit.moveDouble(op2FPR, resultFPR);

            if (op1FPR != resultFPR) {
                done.append(m_jit.jump());

                op1Less.link(&m_jit);
                m_jit.moveDouble(op1FPR, resultFPR);
            } else
                op1Less.link(&m_jit);

            done.link(&m_jit);

            doubleResult(resultFPR, node);
            break;
        }

        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        break;
    }

    case ArithSqrt: {
        SpeculateDoubleOperand op1(this, node->child1());
        FPRTemporary result(this, op1);

        m_jit.sqrtDouble(op1.fpr(), result.fpr());

        doubleResult(result.fpr(), node);
        break;
    }

    case LogicalNot:
        compileLogicalNot(node);
        break;

    // Comparison nodes: compare() returns true when it fully consumed the
    // node (e.g. fused a compare-and-branch), in which case we return rather
    // than break.
    case CompareLess:
        if (compare(node, JITCompiler::LessThan, JITCompiler::DoubleLessThan, operationCompareLess))
            return;
        break;

    case CompareLessEq:
        if (compare(node, JITCompiler::LessThanOrEqual, JITCompiler::DoubleLessThanOrEqual, operationCompareLessEq))
            return;
        break;

    case CompareGreater:
        if (compare(node, JITCompiler::GreaterThan, JITCompiler::DoubleGreaterThan, operationCompareGreater))
            return;
        break;

    case CompareGreaterEq:
        if (compare(node, JITCompiler::GreaterThanOrEqual, JITCompiler::DoubleGreaterThanOrEqual, operationCompareGreaterEq))
            return;
        break;

    case CompareEqConstant:
        ASSERT(isNullConstant(node->child2().node()));
        if (nonSpeculativeCompareNull(node, node->child1()))
            return;
        break;

    case CompareEq:
        if (compare(node, JITCompiler::Equal, JITCompiler::DoubleEqual, operationCompareEq))
            return;
        break;

    case CompareStrictEqConstant:
        if (compileStrictEqForConstant(node, node->child1(), valueOfJSConstant(node->child2().node())))
            return;
        break;

    case CompareStrictEq:
        if (compileStrictEq(node))
            return;
        break;

    case StringCharCodeAt: {
        compileGetCharCodeAt(node);
        break;
    }

    case StringCharAt: {
        // Relies on StringCharAt node having same basic layout as GetByVal
        compileGetByValOnString(node);
        break;
    }

    case StringFromCharCode: {
        compileFromCharCode(node);
        break;
    }

    case CheckArray: {
        checkArray(node);
        break;
    }

    case Arrayify:
    case ArrayifyToStructure: {
        arrayify(node);
        break;
    }

    case GetByVal: {
        switch (node->arrayMode().type()) {
        case Array::SelectUsingPredictions:
        case Array::ForceExit:
            RELEASE_ASSERT_NOT_REACHED();
            terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
            break;
        case Array::Generic: {
            // No useful array speculation: call out to the generic operation.
            JSValueOperand base(this, node->child1());
            JSValueOperand property(this, node->child2());
            GPRReg baseGPR = base.gpr();
            GPRReg propertyGPR = property.gpr();

            flushRegisters();
            GPRResult result(this);
            callOperation(operationGetByVal, result.gpr(), baseGPR, propertyGPR);

            jsValueResult(result.gpr(), node);
            break;
        }
        case Array::Int32:
        case Array::Contiguous: {
            if (node->arrayMode().isInBounds()) {
                SpeculateStrictInt32Operand property(this, node->child2());
                StorageOperand storage(this, node->child3());

                GPRReg propertyReg = property.gpr();
                GPRReg storageReg = storage.gpr();

                if (!m_compileOkay)
                    return;

                // In-bounds speculation: index must be below publicLength.
                speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));

                GPRTemporary result(this);
                m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.gpr());
                // A zero slot is a hole (JSValue() encodes as 0); speculate against it.
                speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchTest64(MacroAssembler::Zero, result.gpr()));
                jsValueResult(result.gpr(), node, node->arrayMode().type() == Array::Int32 ? DataFormatJSInteger : DataFormatJS);
                break;
            }

            // Out-of-bounds-tolerant variant: holes and OOB go to a slow path
            // instead of bailing out.
            SpeculateCellOperand base(this, node->child1());
            SpeculateStrictInt32Operand property(this, node->child2());
            StorageOperand storage(this, node->child3());

            GPRReg baseReg = base.gpr();
            GPRReg propertyReg = property.gpr();
            GPRReg storageReg = storage.gpr();

            if (!m_compileOkay)
                return;

            GPRTemporary result(this);
            GPRReg resultReg = result.gpr();

            MacroAssembler::JumpList slowCases;

            slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));

            m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), resultReg);
            slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, resultReg));

            addSlowPathGenerator(
                slowPathCall(
                    slowCases, this, operationGetByValArrayInt,
                    result.gpr(), baseReg, propertyReg));

            jsValueResult(resultReg, node);
            break;
        }

        case Array::Double: {
            if (node->arrayMode().isInBounds()) {
                if (node->arrayMode().isSaneChain()) {
                    // Sane-chain loads may return the hole as undefined, so
                    // watch the prototype chain structures instead of
                    // checking for holes on every load.
                    JSGlobalObject* globalObject = m_jit.globalObjectFor(node->codeOrigin);
                    ASSERT(globalObject->arrayPrototypeChainIsSane());
                    globalObject->arrayPrototype()->structure()->addTransitionWatchpoint(speculationWatchpoint());
                    globalObject->objectPrototype()->structure()->addTransitionWatchpoint(speculationWatchpoint());
                }

                SpeculateStrictInt32Operand property(this, node->child2());
                StorageOperand storage(this, node->child3());

                GPRReg propertyReg = property.gpr();
                GPRReg storageReg = storage.gpr();

                if (!m_compileOkay)
                    return;

                speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));

                FPRTemporary result(this);
                m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), result.fpr());
                // Holes in double arrays are stored as NaN; NaN != NaN detects them.
                if (!node->arrayMode().isSaneChain())
                    speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, result.fpr(), result.fpr()));
                doubleResult(result.fpr(), node);
                break;
            }

            SpeculateCellOperand base(this, node->child1());
            SpeculateStrictInt32Operand property(this, node->child2());
            StorageOperand storage(this, node->child3());

            GPRReg baseReg = base.gpr();
            GPRReg propertyReg = property.gpr();
            GPRReg storageReg = storage.gpr();

            if (!m_compileOkay)
                return;

            GPRTemporary result(this);
            FPRTemporary temp(this);
            GPRReg resultReg = result.gpr();
            FPRReg tempReg = temp.fpr();

            MacroAssembler::JumpList slowCases;

            slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));

            m_jit.loadDouble(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight), tempReg);
            slowCases.append(m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, tempReg, tempReg));
            // This node produces a boxed JSValue, so re-box the double.
            boxDouble(tempReg, resultReg);

            addSlowPathGenerator(
                slowPathCall(
                    slowCases, this, operationGetByValArrayInt,
                    result.gpr(), baseReg, propertyReg));

            jsValueResult(resultReg, node);
            break;
        }

        case Array::ArrayStorage:
        case Array::SlowPutArrayStorage: {
            if (node->arrayMode().isInBounds()) {
                SpeculateStrictInt32Operand property(this, node->child2());
                StorageOperand storage(this, node->child3());

                GPRReg propertyReg = property.gpr();
                GPRReg storageReg = storage.gpr();

                if (!m_compileOkay)
                    return;

                // ArrayStorage bounds-checks against vectorLength, not publicLength.
                speculationCheck(OutOfBounds, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())));

                GPRTemporary result(this);
                m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), result.gpr());
                speculationCheck(LoadFromHole, JSValueRegs(), 0, m_jit.branchTest64(MacroAssembler::Zero, result.gpr()));

                jsValueResult(result.gpr(), node);
                break;
            }

            SpeculateCellOperand base(this, node->child1());
            SpeculateStrictInt32Operand property(this, node->child2());
            StorageOperand storage(this, node->child3());

            GPRReg baseReg = base.gpr();
            GPRReg propertyReg = property.gpr();
            GPRReg storageReg = storage.gpr();

            if (!m_compileOkay)
                return;

            GPRTemporary result(this);
            GPRReg resultReg = result.gpr();

            MacroAssembler::JumpList slowCases;

            slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset())));

            m_jit.load64(MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), resultReg);
            slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, resultReg));

            addSlowPathGenerator(
                slowPathCall(
                    slowCases, this, operationGetByValArrayInt,
                    result.gpr(), baseReg, propertyReg));

            jsValueResult(resultReg, node);
            break;
        }
        case Array::String:
            compileGetByValOnString(node);
            break;
        case Array::Arguments:
            compileGetByValOnArguments(node);
            break;
        case Array::Int8Array:
            compileGetByValOnIntTypedArray(m_jit.vm()->int8ArrayDescriptor(), node, sizeof(int8_t), SignedTypedArray);
            break;
        case Array::Int16Array:
            compileGetByValOnIntTypedArray(m_jit.vm()->int16ArrayDescriptor(), node, sizeof(int16_t), SignedTypedArray);
            break;
        case Array::Int32Array:
            compileGetByValOnIntTypedArray(m_jit.vm()->int32ArrayDescriptor(), node, sizeof(int32_t), SignedTypedArray);
            break;
        case Array::Uint8Array:
            compileGetByValOnIntTypedArray(m_jit.vm()->uint8ArrayDescriptor(), node, sizeof(uint8_t), UnsignedTypedArray);
            break;
        case Array::Uint8ClampedArray:
            compileGetByValOnIntTypedArray(m_jit.vm()->uint8ClampedArrayDescriptor(), node, sizeof(uint8_t), UnsignedTypedArray);
            break;
        case Array::Uint16Array:
            compileGetByValOnIntTypedArray(m_jit.vm()->uint16ArrayDescriptor(), node, sizeof(uint16_t), UnsignedTypedArray);
            break;
        case Array::Uint32Array:
            compileGetByValOnIntTypedArray(m_jit.vm()->uint32ArrayDescriptor(), node, sizeof(uint32_t), UnsignedTypedArray);
            break;
        case Array::Float32Array:
            compileGetByValOnFloatTypedArray(m_jit.vm()->float32ArrayDescriptor(), node, sizeof(float));
            break;
        case Array::Float64Array:
            compileGetByValOnFloatTypedArray(m_jit.vm()->float64ArrayDescriptor(), node, sizeof(double));
            break;
        default:
            RELEASE_ASSERT_NOT_REACHED();
            break;
        }
        break;
    }

    case PutByVal:
    case PutByValAlias: {
        // PutByVal is a var-arg node: base, property, value, storage.
        Edge child1 = m_jit.graph().varArgChild(node, 0);
        Edge child2 = m_jit.graph().varArgChild(node, 1);
        Edge child3 = m_jit.graph().varArgChild(node, 2);
        Edge child4 = m_jit.graph().varArgChild(node, 3);

        ArrayMode arrayMode = node->arrayMode().modeForPut();
        bool alreadyHandled = false;

        switch (arrayMode.type()) {
        case Array::SelectUsingPredictions:
        case Array::ForceExit:
            RELEASE_ASSERT_NOT_REACHED();
            terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0);
            alreadyHandled = true;
            break;
        case Array::Generic: {
            // Generic mode only applies to the non-aliased form.
            RELEASE_ASSERT(node->op() == PutByVal);

            JSValueOperand arg1(this, child1);
            JSValueOperand arg2(this, child2);
            JSValueOperand arg3(this, child3);
            GPRReg arg1GPR = arg1.gpr();
            GPRReg arg2GPR = arg2.gpr();
            GPRReg arg3GPR = arg3.gpr();
            flushRegisters();

            callOperation(m_jit.strictModeFor(node->codeOrigin) ? operationPutByValStrict : operationPutByValNonStrict, arg1GPR, arg2GPR, arg3GPR);

            noResult(node);
            alreadyHandled = true;
            break;
        }
        default:
            break;
        }

        if (alreadyHandled)
            break;

        // FIXME: the base may not be necessary for some array access modes. But we have to
        // keep it alive to this point, so it's likely to be in a register anyway. Likely
        // no harm in locking it here.
        SpeculateCellOperand base(this, child1);
        SpeculateStrictInt32Operand property(this, child2);

        GPRReg baseReg = base.gpr();
        GPRReg propertyReg = property.gpr();

        switch (arrayMode.type()) {
        case Array::Int32:
        case Array::Contiguous: {
            JSValueOperand value(this, child3, ManualOperandSpeculation);

            GPRReg valueReg = value.gpr();

            if (!m_compileOkay)
                return;

            // Int32 arrays must only store int32s; values at or above the
            // tagTypeNumber base are boxed int32s.
            if (arrayMode.type() == Array::Int32) {
                DFG_TYPE_CHECK(
                    JSValueRegs(valueReg), child3, SpecInt32,
                    m_jit.branch64(
                        MacroAssembler::Below, valueReg, GPRInfo::tagTypeNumberRegister));
            }

            // Contiguous storage may hold cells, so the GC write barrier is
            // needed; Int32 storage never holds cells.
            if (arrayMode.type() == Array::Contiguous && Heap::isWriteBarrierEnabled()) {
                GPRTemporary scratch(this);
                writeBarrier(baseReg, value.gpr(), child3, WriteBarrierForPropertyAccess, scratch.gpr());
            }

            StorageOperand storage(this, child4);
            GPRReg storageReg = storage.gpr();

            if (node->op() == PutByValAlias) {
                // Store the value to the array.
                GPRReg propertyReg = property.gpr();
                GPRReg valueReg = value.gpr();
                m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight));

                noResult(node);
                break;
            }

            GPRTemporary temporary;
            GPRReg temporaryReg = temporaryRegisterForPutByVal(temporary, node);

            MacroAssembler::Jump slowCase;

            if (arrayMode.isInBounds()) {
                // In-bounds speculation: a store past publicLength bails out.
                speculationCheck(
                    StoreToHoleOrOutOfBounds, JSValueRegs(), 0,
                    m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength())));
            } else {
                MacroAssembler::Jump inBounds = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()));

                slowCase = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfVectorLength()));

                if (!arrayMode.isOutOfBounds())
                    speculationCheck(OutOfBounds, JSValueRegs(), 0, slowCase);

                // Appending within the vector: grow publicLength to index + 1.
                m_jit.add32(TrustedImm32(1), propertyReg, temporaryReg);
                m_jit.store32(temporaryReg, MacroAssembler::Address(storageReg, Butterfly::offsetOfPublicLength()));

                inBounds.link(&m_jit);
            }

            m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight));

            base.use();
            property.use();
            value.use();
            storage.use();

            if (arrayMode.isOutOfBounds()) {
                addSlowPathGenerator(
                    slowPathCall(
                        slowCase, this,
                        m_jit.codeBlock()->isStrictMode() ? operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict,
                        NoResult, baseReg, propertyReg, valueReg));
            }

            noResult(node, UseChildrenCalledExplicitly);
            break;
        }

        case Array::Double: {
            compileDoublePutByVal(node, base, property);
            break;
        }

        case Array::ArrayStorage:
        case Array::SlowPutArrayStorage: {
            JSValueOperand value(this, child3);

            GPRReg valueReg = value.gpr();

            if (!m_compileOkay)
                return;

            if (Heap::isWriteBarrierEnabled()) {
                GPRTemporary scratch(this);
                writeBarrier(baseReg, value.gpr(), child3, WriteBarrierForPropertyAccess, scratch.gpr());
            }

            StorageOperand storage(this, child4);
            GPRReg storageReg = storage.gpr();

            if (node->op() == PutByValAlias) {
                // Store the value to the array.
                GPRReg propertyReg = property.gpr();
                GPRReg valueReg = value.gpr();
                m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));

                noResult(node);
                break;
            }

            GPRTemporary temporary;
            GPRReg temporaryReg = temporaryRegisterForPutByVal(temporary, node);

            MacroAssembler::JumpList slowCases;

            MacroAssembler::Jump beyondArrayBounds = m_jit.branch32(MacroAssembler::AboveOrEqual, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::vectorLengthOffset()));
            if (!arrayMode.isOutOfBounds())
                speculationCheck(OutOfBounds, JSValueRegs(), 0, beyondArrayBounds);
            else
                slowCases.append(beyondArrayBounds);

            // Check if we're writing to a hole; if so increment m_numValuesInVector.
2794 if (arrayMode.isInBounds()) { 2795 speculationCheck( 2796 StoreToHole, JSValueRegs(), 0, 2797 m_jit.branchTest64(MacroAssembler::Zero, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])))); 2798 } else { 2799 MacroAssembler::Jump notHoleValue = m_jit.branchTest64(MacroAssembler::NonZero, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); 2800 if (arrayMode.isSlowPut()) { 2801 // This is sort of strange. If we wanted to optimize this code path, we would invert 2802 // the above branch. But it's simply not worth it since this only happens if we're 2803 // already having a bad time. 2804 slowCases.append(m_jit.jump()); 2805 } else { 2806 m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageReg, ArrayStorage::numValuesInVectorOffset())); 2807 2808 // If we're writing to a hole we might be growing the array; 2809 MacroAssembler::Jump lengthDoesNotNeedUpdate = m_jit.branch32(MacroAssembler::Below, propertyReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset())); 2810 m_jit.add32(TrustedImm32(1), propertyReg, temporaryReg); 2811 m_jit.store32(temporaryReg, MacroAssembler::Address(storageReg, ArrayStorage::lengthOffset())); 2812 2813 lengthDoesNotNeedUpdate.link(&m_jit); 2814 } 2815 notHoleValue.link(&m_jit); 2816 } 2817 2818 // Store the value to the array. 2819 m_jit.store64(valueReg, MacroAssembler::BaseIndex(storageReg, propertyReg, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); 2820 2821 base.use(); 2822 property.use(); 2823 value.use(); 2824 storage.use(); 2825 2826 if (!slowCases.empty()) { 2827 addSlowPathGenerator( 2828 slowPathCall( 2829 slowCases, this, 2830 m_jit.codeBlock()->isStrictMode() ? 
operationPutByValBeyondArrayBoundsStrict : operationPutByValBeyondArrayBoundsNonStrict, 2831 NoResult, baseReg, propertyReg, valueReg)); 2832 } 2833 2834 noResult(node, UseChildrenCalledExplicitly); 2835 break; 2836 } 2837 2838 case Array::Arguments: { 2839 JSValueOperand value(this, child3); 2840 GPRTemporary scratch(this); 2841 GPRTemporary scratch2(this); 2842 2843 GPRReg valueReg = value.gpr(); 2844 GPRReg scratchReg = scratch.gpr(); 2845 GPRReg scratch2Reg = scratch2.gpr(); 2846 2847 if (!m_compileOkay) 2848 return; 2849 2850 // Two really lame checks. 2851 speculationCheck( 2852 Uncountable, JSValueSource(), 0, 2853 m_jit.branch32( 2854 MacroAssembler::AboveOrEqual, propertyReg, 2855 MacroAssembler::Address(baseReg, OBJECT_OFFSETOF(Arguments, m_numArguments)))); 2856 speculationCheck( 2857 Uncountable, JSValueSource(), 0, 2858 m_jit.branchTestPtr( 2859 MacroAssembler::NonZero, 2860 MacroAssembler::Address( 2861 baseReg, OBJECT_OFFSETOF(Arguments, m_slowArguments)))); 2862 2863 m_jit.move(propertyReg, scratch2Reg); 2864 m_jit.neg32(scratch2Reg); 2865 m_jit.signExtend32ToPtr(scratch2Reg, scratch2Reg); 2866 m_jit.loadPtr( 2867 MacroAssembler::Address(baseReg, OBJECT_OFFSETOF(Arguments, m_registers)), 2868 scratchReg); 2869 2870 m_jit.store64( 2871 valueReg, 2872 MacroAssembler::BaseIndex( 2873 scratchReg, scratch2Reg, MacroAssembler::TimesEight, 2874 CallFrame::thisArgumentOffset() * sizeof(Register) - sizeof(Register))); 2875 2876 noResult(node); 2877 break; 2878 } 2879 2880 case Array::Int8Array: 2881 compilePutByValForIntTypedArray(m_jit.vm()->int8ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int8_t), SignedTypedArray); 2882 break; 2883 2884 case Array::Int16Array: 2885 compilePutByValForIntTypedArray(m_jit.vm()->int16ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(int16_t), SignedTypedArray); 2886 break; 2887 2888 case Array::Int32Array: 2889 compilePutByValForIntTypedArray(m_jit.vm()->int32ArrayDescriptor(), base.gpr(), 
property.gpr(), node, sizeof(int32_t), SignedTypedArray); 2890 break; 2891 2892 case Array::Uint8Array: 2893 compilePutByValForIntTypedArray(m_jit.vm()->uint8ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint8_t), UnsignedTypedArray); 2894 break; 2895 2896 case Array::Uint8ClampedArray: 2897 compilePutByValForIntTypedArray(m_jit.vm()->uint8ClampedArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint8_t), UnsignedTypedArray, ClampRounding); 2898 break; 2899 2900 case Array::Uint16Array: 2901 compilePutByValForIntTypedArray(m_jit.vm()->uint16ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint16_t), UnsignedTypedArray); 2902 break; 2903 2904 case Array::Uint32Array: 2905 compilePutByValForIntTypedArray(m_jit.vm()->uint32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(uint32_t), UnsignedTypedArray); 2906 break; 2907 2908 case Array::Float32Array: 2909 compilePutByValForFloatTypedArray(m_jit.vm()->float32ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(float)); 2910 break; 2911 2912 case Array::Float64Array: 2913 compilePutByValForFloatTypedArray(m_jit.vm()->float64ArrayDescriptor(), base.gpr(), property.gpr(), node, sizeof(double)); 2914 break; 2915 2916 default: 2917 RELEASE_ASSERT_NOT_REACHED(); 2918 break; 2919 } 2920 2921 break; 2922 } 2923 2924 case RegExpExec: { 2925 if (compileRegExpExec(node)) 2926 return; 2927 if (!node->adjustedRefCount()) { 2928 SpeculateCellOperand base(this, node->child1()); 2929 SpeculateCellOperand argument(this, node->child2()); 2930 GPRReg baseGPR = base.gpr(); 2931 GPRReg argumentGPR = argument.gpr(); 2932 2933 flushRegisters(); 2934 GPRResult result(this); 2935 callOperation(operationRegExpTest, result.gpr(), baseGPR, argumentGPR); 2936 2937 // Must use jsValueResult because otherwise we screw up register 2938 // allocation, which thinks that this node has a result. 
            jsValueResult(result.gpr(), node);
            break;
        }
        
        SpeculateCellOperand base(this, node->child1());
        SpeculateCellOperand argument(this, node->child2());
        GPRReg baseGPR = base.gpr();
        GPRReg argumentGPR = argument.gpr();
        
        flushRegisters();
        GPRResult result(this);
        callOperation(operationRegExpExec, result.gpr(), baseGPR, argumentGPR);
        
        jsValueResult(result.gpr(), node);
        break;
    }
        
    case RegExpTest: {
        SpeculateCellOperand base(this, node->child1());
        SpeculateCellOperand argument(this, node->child2());
        GPRReg baseGPR = base.gpr();
        GPRReg argumentGPR = argument.gpr();
        
        flushRegisters();
        GPRResult result(this);
        callOperation(operationRegExpTest, result.gpr(), baseGPR, argumentGPR);
        
        // If we add a DataFormatBool, we should use it here.
        // OR-ing in ValueFalse boxes the runtime's 0/1 result as a JS boolean.
        m_jit.or32(TrustedImm32(ValueFalse), result.gpr());
        jsValueResult(result.gpr(), node, DataFormatJSBoolean);
        break;
    }
        
    case ArrayPush: {
        ASSERT(node->arrayMode().isJSArray());
        
        SpeculateCellOperand base(this, node->child1());
        GPRTemporary storageLength(this);
        
        GPRReg baseGPR = base.gpr();
        GPRReg storageLengthGPR = storageLength.gpr();
        
        StorageOperand storage(this, node->child3());
        GPRReg storageGPR = storage.gpr();
        
        switch (node->arrayMode().type()) {
        case Array::Int32:
        case Array::Contiguous: {
            JSValueOperand value(this, node->child2(), ManualOperandSpeculation);
            GPRReg valueGPR = value.gpr();
            
            if (node->arrayMode().type() == Array::Int32) {
                DFG_TYPE_CHECK(
                    JSValueRegs(valueGPR), node->child2(), SpecInt32,
                    m_jit.branch64(
                        MacroAssembler::Below, valueGPR, GPRInfo::tagTypeNumberRegister));
            }
            
            // Int32 pushes store no cells, so the barrier only fires for Contiguous.
            if (node->arrayMode().type() != Array::Int32 && Heap::isWriteBarrierEnabled()) {
                GPRTemporary scratch(this);
                writeBarrier(baseGPR, valueGPR, node->child2(), WriteBarrierForPropertyAccess, scratch.gpr(), storageLengthGPR);
            }
            
            m_jit.load32(MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
            MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
            m_jit.store64(valueGPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
            m_jit.add32(TrustedImm32(1), storageLengthGPR);
            m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
            // Box the new length as an int32 JSValue; storageLengthGPR doubles as the result.
            m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
            
            addSlowPathGenerator(
                slowPathCall(
                    slowPath, this, operationArrayPush, NoResult, storageLengthGPR,
                    valueGPR, baseGPR));
            
            jsValueResult(storageLengthGPR, node);
            break;
        }
            
        case Array::Double: {
            SpeculateDoubleOperand value(this, node->child2());
            FPRReg valueFPR = value.fpr();
            
            // NaN fails the self-compare, so this rejects impure NaN encodings.
            DFG_TYPE_CHECK(
                JSValueRegs(), node->child2(), SpecRealNumber,
                m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, valueFPR, valueFPR));
            
            m_jit.load32(MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
            MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
            m_jit.storeDouble(valueFPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
            m_jit.add32(TrustedImm32(1), storageLengthGPR);
            m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
            m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
            
            addSlowPathGenerator(
                slowPathCall(
                    slowPath, this, operationArrayPushDouble, NoResult, storageLengthGPR,
                    valueFPR, baseGPR));
            
            jsValueResult(storageLengthGPR, node);
            break;
        }
            
        case Array::ArrayStorage: {
            JSValueOperand value(this, node->child2());
            GPRReg valueGPR = value.gpr();
            
            if (Heap::isWriteBarrierEnabled()) {
                GPRTemporary scratch(this);
                writeBarrier(baseGPR, valueGPR, node->child2(), WriteBarrierForPropertyAccess, scratch.gpr(), storageLengthGPR);
            }
            
            m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR);
            
            // Refuse to handle bizarre lengths.
            speculationCheck(Uncountable, JSValueRegs(), 0, m_jit.branch32(MacroAssembler::Above, storageLengthGPR, TrustedImm32(0x7ffffffe)));
            
            MacroAssembler::Jump slowPath = m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset()));
            
            m_jit.store64(valueGPR, MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
            
            m_jit.add32(TrustedImm32(1), storageLengthGPR);
            m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()));
            m_jit.add32(TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
            m_jit.or64(GPRInfo::tagTypeNumberRegister, storageLengthGPR);
            
            addSlowPathGenerator(
                slowPathCall(
                    slowPath, this, operationArrayPush, NoResult, storageLengthGPR,
                    valueGPR, baseGPR));
            
            jsValueResult(storageLengthGPR, node);
            break;
        }
            
        default:
            CRASH();
            break;
        }
        break;
    }
        
    case ArrayPop: {
        ASSERT(node->arrayMode().isJSArray());
        
        SpeculateCellOperand base(this, node->child1());
        StorageOperand storage(this, node->child2());
        GPRTemporary value(this);
        GPRTemporary storageLength(this);
        FPRTemporary temp(this); // This is kind of lame, since we don't always need it.
        // I'm relying on the fact that we don't have FPR pressure, especially in code
        // that uses pop().
        
        GPRReg baseGPR = base.gpr();
        GPRReg storageGPR = storage.gpr();
        GPRReg valueGPR = value.gpr();
        GPRReg storageLengthGPR = storageLength.gpr();
        FPRReg tempFPR = temp.fpr();
        
        switch (node->arrayMode().type()) {
        case Array::Int32:
        case Array::Double:
        case Array::Contiguous: {
            m_jit.load32(
                MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()), storageLengthGPR);
            MacroAssembler::Jump undefinedCase =
                m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR);
            m_jit.sub32(TrustedImm32(1), storageLengthGPR);
            m_jit.store32(
                storageLengthGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
            MacroAssembler::Jump slowCase;
            if (node->arrayMode().type() == Array::Double) {
                m_jit.loadDouble(
                    MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight),
                    tempFPR);
                // FIXME: This would not have to be here if changing the publicLength also zeroed the values between the old
                // length and the new length.
                m_jit.store64(
                    MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
                // A NaN read back here means the slot was a hole — recover in the slow path.
                slowCase = m_jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, tempFPR, tempFPR);
                boxDouble(tempFPR, valueGPR);
            } else {
                m_jit.load64(
                    MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight),
                    valueGPR);
                // FIXME: This would not have to be here if changing the publicLength also zeroed the values between the old
                // length and the new length.
                m_jit.store64(
                    MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight));
                // Zero means the slot was a hole — recover in the slow path.
                slowCase = m_jit.branchTest64(MacroAssembler::Zero, valueGPR);
            }
            
            addSlowPathGenerator(
                slowPathMove(
                    undefinedCase, this,
                    MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), valueGPR));
            addSlowPathGenerator(
                slowPathCall(
                    slowCase, this, operationArrayPopAndRecoverLength, valueGPR, baseGPR));
            
            // We can't know for sure that the result is an int because of the slow paths. :-/
            jsValueResult(valueGPR, node);
            break;
        }
            
        case Array::ArrayStorage: {
            m_jit.load32(MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()), storageLengthGPR);
            
            JITCompiler::Jump undefinedCase =
                m_jit.branchTest32(MacroAssembler::Zero, storageLengthGPR);
            
            m_jit.sub32(TrustedImm32(1), storageLengthGPR);
            
            JITCompiler::JumpList slowCases;
            slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::vectorLengthOffset())));
            
            m_jit.load64(MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), valueGPR);
            slowCases.append(m_jit.branchTest64(MacroAssembler::Zero, valueGPR));
            
            m_jit.store32(storageLengthGPR, MacroAssembler::Address(storageGPR, ArrayStorage::lengthOffset()));
            
            m_jit.store64(MacroAssembler::TrustedImm64((int64_t)0), MacroAssembler::BaseIndex(storageGPR, storageLengthGPR, MacroAssembler::TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])));
            m_jit.sub32(MacroAssembler::TrustedImm32(1), MacroAssembler::Address(storageGPR, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
            
            addSlowPathGenerator(
                slowPathMove(
                    undefinedCase, this,
                    MacroAssembler::TrustedImm64(JSValue::encode(jsUndefined())), valueGPR));
            
            addSlowPathGenerator(
                slowPathCall(
                    slowCases, this, operationArrayPop, valueGPR, baseGPR));
            
            jsValueResult(valueGPR, node);
            break;
        }
            
        default:
            CRASH();
            break;
        }
        break;
    }
        
    case DFG::Jump: {
        BlockIndex taken = node->takenBlockIndex();
        jump(taken);
        noResult(node);
        break;
    }
        
    case Branch:
        emitBranch(node);
        break;
        
    case Return: {
        ASSERT(GPRInfo::callFrameRegister != GPRInfo::regT1);
        ASSERT(GPRInfo::regT1 != GPRInfo::returnValueGPR);
        ASSERT(GPRInfo::returnValueGPR != GPRInfo::callFrameRegister);
        
#if DFG_ENABLE(SUCCESS_STATS)
        static SamplingCounter counter("SpeculativeJIT");
        m_jit.emitCount(counter);
#endif
        
        // Return the result in returnValueGPR.
        JSValueOperand op1(this, node->child1());
        m_jit.move(op1.gpr(), GPRInfo::returnValueGPR);
        
        // Grab the return address.
        m_jit.emitGetFromCallFrameHeaderPtr(JSStack::ReturnPC, GPRInfo::regT1);
        // Restore our caller's "r".
        m_jit.emitGetFromCallFrameHeaderPtr(JSStack::CallerFrame, GPRInfo::callFrameRegister);
        // Return.
        m_jit.restoreReturnAddressBeforeReturn(GPRInfo::regT1);
        m_jit.ret();
        
        noResult(node);
        break;
    }
        
    case Throw:
    case ThrowReferenceError: {
        // We expect that throw statements are rare and are intended to exit the code block
        // anyway, so we just OSR back to the old JIT for now.
        terminateSpeculativeExecution(Uncountable, JSValueRegs(), 0);
        break;
    }
        
    case ToPrimitive: {
        RELEASE_ASSERT(node->child1().useKind() == UntypedUse);
        JSValueOperand op1(this, node->child1());
        GPRTemporary result(this, op1);
        
        GPRReg op1GPR = op1.gpr();
        GPRReg resultGPR = result.gpr();
        
        op1.use();
        
        if (!(m_state.forNode(node->child1()).m_type & ~(SpecNumber | SpecBoolean)))
            m_jit.move(op1GPR, resultGPR);
        else {
            // Non-cells (tag bits set) are already primitive; a cell is only primitive
            // here if it is a string (structure matches the VM's string structure).
            MacroAssembler::Jump alreadyPrimitive = m_jit.branchTest64(MacroAssembler::NonZero, op1GPR, GPRInfo::tagMaskRegister);
            MacroAssembler::Jump notPrimitive = m_jit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(op1GPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(m_jit.vm()->stringStructure.get()));
            
            alreadyPrimitive.link(&m_jit);
            m_jit.move(op1GPR, resultGPR);
            
            addSlowPathGenerator(
                slowPathCall(notPrimitive, this, operationToPrimitive, resultGPR, op1GPR));
        }
        
        jsValueResult(resultGPR, node, UseChildrenCalledExplicitly);
        break;
    }
        
    case ToString: {
        if (node->child1().useKind() == UntypedUse) {
            JSValueOperand op1(this, node->child1());
            GPRReg op1GPR = op1.gpr();
            
            GPRResult result(this);
            GPRReg resultGPR = result.gpr();
            
            flushRegisters();
            
            JITCompiler::Jump done;
            if (node->child1()->prediction() & SpecString) {
                // Fast path if the input is already a string: pass it through unchanged.
                JITCompiler::Jump slowPath1 = m_jit.branchTest64(
                    JITCompiler::NonZero, op1GPR, GPRInfo::tagMaskRegister);
                JITCompiler::Jump slowPath2 = m_jit.branchPtr(
                    JITCompiler::NotEqual,
                    JITCompiler::Address(op1GPR, JSCell::structureOffset()),
                    TrustedImmPtr(m_jit.vm()->stringStructure.get()));
                m_jit.move(op1GPR, resultGPR);
                done = m_jit.jump();
                slowPath1.link(&m_jit);
                slowPath2.link(&m_jit);
            }
            callOperation(operationToString, resultGPR, op1GPR);
            if (done.isSet())
                done.link(&m_jit);
            cellResult(resultGPR, node);
            break;
        }
        
        compileToStringOnCell(node);
        break;
    }
        
    case NewStringObject: {
        compileNewStringObject(node);
        break;
    }
        
    case NewArray: {
        JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
        if (!globalObject->isHavingABadTime() && !hasArrayStorage(node->indexingType())) {
            // Inline allocation path; the watchpoint invalidates this code if the global
            // object later goes "bad time" (indexing shapes become unreliable).
            globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
            
            Structure* structure = globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType());
            RELEASE_ASSERT(structure->indexingType() == node->indexingType());
            ASSERT(
                hasUndecided(structure->indexingType())
                || hasInt32(structure->indexingType())
                || hasDouble(structure->indexingType())
                || hasContiguous(structure->indexingType()));
            
            unsigned numElements = node->numChildren();
            
            GPRTemporary result(this);
            GPRTemporary storage(this);
            
            GPRReg resultGPR = result.gpr();
            GPRReg storageGPR = storage.gpr();
            
            emitAllocateJSArray(resultGPR, structure, storageGPR, numElements);
            
            // At this point, one way or another, resultGPR and storageGPR have pointers to
            // the JSArray and the Butterfly, respectively.
            
            ASSERT(!hasUndecided(structure->indexingType()) || !node->numChildren());
            
            for (unsigned operandIdx = 0; operandIdx < node->numChildren(); ++operandIdx) {
                Edge use = m_jit.graph().m_varArgChildren[node->firstChild() + operandIdx];
                switch (node->indexingType()) {
                case ALL_BLANK_INDEXING_TYPES:
                case ALL_UNDECIDED_INDEXING_TYPES:
                    CRASH();
                    break;
                case ALL_DOUBLE_INDEXING_TYPES: {
                    SpeculateDoubleOperand operand(this, use);
                    FPRReg opFPR = operand.fpr();
                    DFG_TYPE_CHECK(
                        JSValueRegs(), use, SpecRealNumber,
                        m_jit.branchDouble(
                            MacroAssembler::DoubleNotEqualOrUnordered, opFPR, opFPR));
                    m_jit.storeDouble(opFPR, MacroAssembler::Address(storageGPR, sizeof(double) * operandIdx));
                    break;
                }
                case ALL_INT32_INDEXING_TYPES:
                case ALL_CONTIGUOUS_INDEXING_TYPES: {
                    JSValueOperand operand(this, use, ManualOperandSpeculation);
                    GPRReg opGPR = operand.gpr();
                    if (hasInt32(node->indexingType())) {
                        DFG_TYPE_CHECK(
                            JSValueRegs(opGPR), use, SpecInt32,
                            m_jit.branch64(
                                MacroAssembler::Below, opGPR, GPRInfo::tagTypeNumberRegister));
                    }
                    m_jit.store64(opGPR, MacroAssembler::Address(storageGPR, sizeof(JSValue) * operandIdx));
                    break;
                }
                default:
                    CRASH();
                    break;
                }
            }
            
            // Yuck, we should *really* have a way of also returning the storageGPR. But
            // that's the least of what's wrong with this code. We really shouldn't be
            // allocating the array after having computed - and probably spilled to the
            // stack - all of the things that will go into the array. The solution to that
            // bigger problem will also likely fix the redundancy in reloading the storage
            // pointer that we currently have.
            
            cellResult(resultGPR, node);
            break;
        }
        
        if (!node->numChildren()) {
            flushRegisters();
            GPRResult result(this);
            callOperation(operationNewEmptyArray, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()));
            cellResult(result.gpr(), node);
            break;
        }
        
        // Slow path: spill all elements to a VM scratch buffer and let the runtime
        // build the array from it.
        size_t scratchSize = sizeof(EncodedJSValue) * node->numChildren();
        ScratchBuffer* scratchBuffer = m_jit.vm()->scratchBufferForSize(scratchSize);
        EncodedJSValue* buffer = scratchBuffer ? static_cast<EncodedJSValue*>(scratchBuffer->dataBuffer()) : 0;
        
        for (unsigned operandIdx = 0; operandIdx < node->numChildren(); ++operandIdx) {
            // Need to perform the speculations that this node promises to perform. If we're
            // emitting code here and the indexing type is not array storage then there is
            // probably something hilarious going on and we're already failing at all the
            // things, but at least we're going to be sound.
            Edge use = m_jit.graph().m_varArgChildren[node->firstChild() + operandIdx];
            switch (node->indexingType()) {
            case ALL_BLANK_INDEXING_TYPES:
            case ALL_UNDECIDED_INDEXING_TYPES:
                CRASH();
                break;
            case ALL_DOUBLE_INDEXING_TYPES: {
                SpeculateDoubleOperand operand(this, use);
                GPRTemporary scratch(this);
                FPRReg opFPR = operand.fpr();
                GPRReg scratchGPR = scratch.gpr();
                DFG_TYPE_CHECK(
                    JSValueRegs(), use, SpecRealNumber,
                    m_jit.branchDouble(
                        MacroAssembler::DoubleNotEqualOrUnordered, opFPR, opFPR));
                m_jit.boxDouble(opFPR, scratchGPR);
                m_jit.store64(scratchGPR, buffer + operandIdx);
                break;
            }
            case ALL_INT32_INDEXING_TYPES: {
                JSValueOperand operand(this, use, ManualOperandSpeculation);
                GPRReg opGPR = operand.gpr();
                if (hasInt32(node->indexingType())) {
                    DFG_TYPE_CHECK(
                        JSValueRegs(opGPR), use, SpecInt32,
                        m_jit.branch64(
                            MacroAssembler::Below, opGPR, GPRInfo::tagTypeNumberRegister));
                }
                m_jit.store64(opGPR, buffer + operandIdx);
                break;
            }
            case ALL_CONTIGUOUS_INDEXING_TYPES:
            case ALL_ARRAY_STORAGE_INDEXING_TYPES: {
                JSValueOperand operand(this, use);
                GPRReg opGPR = operand.gpr();
                m_jit.store64(opGPR, buffer + operandIdx);
                operand.use();
                break;
            }
            default:
                CRASH();
                break;
            }
        }
        
        switch (node->indexingType()) {
        case ALL_DOUBLE_INDEXING_TYPES:
        case ALL_INT32_INDEXING_TYPES:
            useChildren(node);
            break;
        default:
            break;
        }
        
        flushRegisters();
        
        if (scratchSize) {
            GPRTemporary scratch(this);
            
            // Tell GC mark phase how much of the scratch buffer is active during call.
            m_jit.move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), scratch.gpr());
            m_jit.storePtr(TrustedImmPtr(scratchSize), scratch.gpr());
        }
        
        GPRResult result(this);
        
        callOperation(
            operationNewArray, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()),
            static_cast<void*>(buffer), node->numChildren());
        
        if (scratchSize) {
            GPRTemporary scratch(this);
            
            // Deactivate the scratch buffer again now that the call is done.
            m_jit.move(TrustedImmPtr(scratchBuffer->activeLengthPtr()), scratch.gpr());
            m_jit.storePtr(TrustedImmPtr(0), scratch.gpr());
        }
        
        cellResult(result.gpr(), node, UseChildrenCalledExplicitly);
        break;
    }
        
    case NewArrayWithSize: {
        JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
        if (!globalObject->isHavingABadTime() && !hasArrayStorage(node->indexingType())) {
            globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
            
            SpeculateStrictInt32Operand size(this, node->child1());
            GPRTemporary result(this);
            GPRTemporary storage(this);
            GPRTemporary scratch(this);
            GPRTemporary scratch2(this);
            
            GPRReg sizeGPR = size.gpr();
            GPRReg resultGPR = result.gpr();
            GPRReg storageGPR = storage.gpr();
            GPRReg scratchGPR = scratch.gpr();
            GPRReg scratch2GPR = scratch2.gpr();
            
            MacroAssembler::JumpList slowCases;
            slowCases.append(m_jit.branch32(MacroAssembler::AboveOrEqual, sizeGPR, TrustedImm32(MIN_SPARSE_ARRAY_INDEX)));
            
            // Butterfly byte size = size * sizeof(JSValue) (i.e. size << 3) + IndexingHeader.
            ASSERT((1 << 3) == sizeof(JSValue));
            m_jit.move(sizeGPR, scratchGPR);
            m_jit.lshift32(TrustedImm32(3), scratchGPR);
            m_jit.add32(TrustedImm32(sizeof(IndexingHeader)), scratchGPR, resultGPR);
            slowCases.append(
                emitAllocateBasicStorage(resultGPR, storageGPR));
            m_jit.subPtr(scratchGPR, storageGPR);
            Structure* structure = globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType());
            emitAllocateJSObject<JSArray>(resultGPR, ImmPtr(structure), storageGPR, scratchGPR, scratch2GPR, slowCases);
            
            m_jit.store32(sizeGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfPublicLength()));
            m_jit.store32(sizeGPR, MacroAssembler::Address(storageGPR, Butterfly::offsetOfVectorLength()));
            
            if (hasDouble(node->indexingType())) {
                // Prefill every double slot with QNaN — presumably the hole marker for
                // double arrays; confirm against the double-array hole convention.
                m_jit.move(TrustedImm64(bitwise_cast<int64_t>(QNaN)), scratchGPR);
                m_jit.move(sizeGPR, scratch2GPR);
                MacroAssembler::Jump done = m_jit.branchTest32(MacroAssembler::Zero, scratch2GPR);
                MacroAssembler::Label loop = m_jit.label();
                m_jit.sub32(TrustedImm32(1), scratch2GPR);
                m_jit.store64(scratchGPR, MacroAssembler::BaseIndex(storageGPR, scratch2GPR, MacroAssembler::TimesEight));
                m_jit.branchTest32(MacroAssembler::NonZero, scratch2GPR).linkTo(loop, &m_jit);
                done.link(&m_jit);
            }
            
            addSlowPathGenerator(adoptPtr(
                new CallArrayAllocatorWithVariableSizeSlowPathGenerator(
                    slowCases, this, operationNewArrayWithSize, resultGPR,
                    globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()),
                    globalObject->arrayStructureForIndexingTypeDuringAllocation(ArrayWithArrayStorage),
                    sizeGPR)));
            
            cellResult(resultGPR, node);
            break;
        }
        
        SpeculateStrictInt32Operand size(this, node->child1());
        GPRReg sizeGPR = size.gpr();
        flushRegisters();
        GPRResult result(this);
        GPRReg resultGPR = result.gpr();
        GPRReg structureGPR = selectScratchGPR(sizeGPR);
        // Large sizes get ArrayStorage; small ones keep the profiled indexing type.
        MacroAssembler::Jump bigLength = m_jit.branch32(MacroAssembler::AboveOrEqual, sizeGPR, TrustedImm32(MIN_SPARSE_ARRAY_INDEX));
        m_jit.move(TrustedImmPtr(globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType())), structureGPR);
        MacroAssembler::Jump done = m_jit.jump();
        bigLength.link(&m_jit);
        m_jit.move(TrustedImmPtr(globalObject->arrayStructureForIndexingTypeDuringAllocation(ArrayWithArrayStorage)), structureGPR);
        done.link(&m_jit);
        callOperation(operationNewArrayWithSize, resultGPR, structureGPR, sizeGPR);
        cellResult(resultGPR, node);
        break;
    }
        
    case NewArrayBuffer: {
        JSGlobalObject* globalObject = m_jit.graph().globalObjectFor(node->codeOrigin);
        IndexingType indexingType = node->indexingType();
        if (!globalObject->isHavingABadTime() && !hasArrayStorage(indexingType)) {
            globalObject->havingABadTimeWatchpoint()->add(speculationWatchpoint());
            
            unsigned numElements = node->numConstants();
            
            GPRTemporary result(this);
            GPRTemporary storage(this);
            
            GPRReg resultGPR = result.gpr();
            GPRReg storageGPR = storage.gpr();
            
            emitAllocateJSArray(resultGPR, globalObject->arrayStructureForIndexingTypeDuringAllocation(indexingType), storageGPR, numElements);
            
            RELEASE_ASSERT(indexingType & IsArray);
            // Copy the literal's constants straight into the butterfly, raw doubles for
            // double arrays and encoded JSValues otherwise.
            JSValue* data = m_jit.codeBlock()->constantBuffer(node->startConstant());
            if (indexingType == ArrayWithDouble) {
                for (unsigned index = 0; index < node->numConstants(); ++index) {
                    double value = data[index].asNumber();
                    m_jit.store64(
                        Imm64(bitwise_cast<int64_t>(value)),
                        MacroAssembler::Address(storageGPR, sizeof(double) * index));
                }
            } else {
                for (unsigned index = 0; index < node->numConstants(); ++index) {
                    m_jit.store64(
                        Imm64(JSValue::encode(data[index])),
                        MacroAssembler::Address(storageGPR, sizeof(JSValue) * index));
                }
            }
            
            cellResult(resultGPR, node);
            break;
        }
        
        flushRegisters();
        GPRResult result(this);
        
        callOperation(operationNewArrayBuffer, result.gpr(), globalObject->arrayStructureForIndexingTypeDuringAllocation(node->indexingType()), node->startConstant(), node->numConstants());
        
        cellResult(result.gpr(), node);
        break;
    }
        
    case NewRegexp: {
        flushRegisters();
        GPRResult result(this);
        
        callOperation(operationNewRegexp, result.gpr(), m_jit.codeBlock()->regexp(node->regexpIndex()));
        
        cellResult(result.gpr(), node);
        break;
    }
        
    case ConvertThis: {
        ASSERT(node->child1().useKind() == UntypedUse);
        JSValueOperand thisValue(this, node->child1());
        GPRReg thisValueGPR = thisValue.gpr();
        
        flushRegisters();
        
        GPRResult result(this);
        callOperation(operationConvertThis, result.gpr(), thisValueGPR);
        
        cellResult(result.gpr(), node);
        break;
    }
        
    case CreateThis: {
        // Note that there is not so much profit to speculate here. The only things we
        // speculate on are (1) that it's a cell, since that eliminates cell checks
        // later if the proto is reused, and (2) if we have a FinalObject prediction
        // then we speculate because we want to get recompiled if it isn't (since
        // otherwise we'd start taking slow path a lot).
        
        SpeculateCellOperand callee(this, node->child1());
        GPRTemporary result(this);
        GPRTemporary allocator(this);
        GPRTemporary structure(this);
        GPRTemporary scratch(this);
        
        GPRReg calleeGPR = callee.gpr();
        GPRReg resultGPR = result.gpr();
        GPRReg allocatorGPR = allocator.gpr();
        GPRReg structureGPR = structure.gpr();
        GPRReg scratchGPR = scratch.gpr();
        
        MacroAssembler::JumpList slowPath;
        
        // Load the callee's cached allocator/structure from its allocation profile; a
        // null allocator means no inline allocation is possible — take the slow path.
        m_jit.loadPtr(JITCompiler::Address(calleeGPR, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfAllocator()), allocatorGPR);
        m_jit.loadPtr(JITCompiler::Address(calleeGPR, JSFunction::offsetOfAllocationProfile() + ObjectAllocationProfile::offsetOfStructure()), structureGPR);
        slowPath.append(m_jit.branchTestPtr(MacroAssembler::Zero, allocatorGPR));
        emitAllocateJSObject(resultGPR, allocatorGPR, structureGPR, TrustedImmPtr(0), scratchGPR, slowPath);
        
        addSlowPathGenerator(slowPathCall(slowPath, this, operationCreateThis, resultGPR, calleeGPR, node->inlineCapacity()));
        
cellResult(resultGPR, node); 3636 break; 3637 } 3638 3639 case AllocationProfileWatchpoint: { 3640 jsCast<JSFunction*>(node->function())->addAllocationProfileWatchpoint(speculationWatchpoint()); 3641 noResult(node); 3642 break; 3643 } 3644 3645 case NewObject: { 3646 GPRTemporary result(this); 3647 GPRTemporary allocator(this); 3648 GPRTemporary scratch(this); 3649 3650 GPRReg resultGPR = result.gpr(); 3651 GPRReg allocatorGPR = allocator.gpr(); 3652 GPRReg scratchGPR = scratch.gpr(); 3653 3654 MacroAssembler::JumpList slowPath; 3655 3656 Structure* structure = node->structure(); 3657 size_t allocationSize = JSObject::allocationSize(structure->inlineCapacity()); 3658 MarkedAllocator* allocatorPtr = &m_jit.vm()->heap.allocatorForObjectWithoutDestructor(allocationSize); 3659 3660 m_jit.move(TrustedImmPtr(allocatorPtr), allocatorGPR); 3661 emitAllocateJSObject(resultGPR, allocatorGPR, TrustedImmPtr(structure), TrustedImmPtr(0), scratchGPR, slowPath); 3662 3663 addSlowPathGenerator(slowPathCall(slowPath, this, operationNewObject, resultGPR, structure)); 3664 3665 cellResult(resultGPR, node); 3666 break; 3667 } 3668 3669 case GetCallee: { 3670 GPRTemporary result(this); 3671 m_jit.loadPtr(JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::Callee))), result.gpr()); 3672 cellResult(result.gpr(), node); 3673 break; 3674 } 3675 3676 case SetCallee: { 3677 SpeculateCellOperand callee(this, node->child1()); 3678 m_jit.storePtr(callee.gpr(), JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::Callee)))); 3679 noResult(node); 3680 break; 3681 } 3682 3683 case GetScope: { 3684 SpeculateCellOperand function(this, node->child1()); 3685 GPRTemporary result(this, function); 3686 m_jit.loadPtr(JITCompiler::Address(function.gpr(), JSFunction::offsetOfScopeChain()), result.gpr()); 3687 cellResult(result.gpr(), node); 3688 break; 3689 } 3690 3691 case GetMyScope: { 
3692 GPRTemporary result(this); 3693 GPRReg resultGPR = result.gpr(); 3694 3695 m_jit.loadPtr(JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::ScopeChain))), resultGPR); 3696 cellResult(resultGPR, node); 3697 break; 3698 } 3699 3700 case SetMyScope: { 3701 SpeculateCellOperand callee(this, node->child1()); 3702 m_jit.storePtr(callee.gpr(), JITCompiler::addressFor(static_cast<VirtualRegister>(node->codeOrigin.stackOffset() + static_cast<int>(JSStack::ScopeChain)))); 3703 noResult(node); 3704 break; 3705 } 3706 3707 case SkipTopScope: { 3708 SpeculateCellOperand scope(this, node->child1()); 3709 GPRTemporary result(this, scope); 3710 GPRReg resultGPR = result.gpr(); 3711 m_jit.move(scope.gpr(), resultGPR); 3712 JITCompiler::Jump activationNotCreated = 3713 m_jit.branchTest64( 3714 JITCompiler::Zero, 3715 JITCompiler::addressFor( 3716 static_cast<VirtualRegister>(m_jit.codeBlock()->activationRegister()))); 3717 m_jit.loadPtr(JITCompiler::Address(resultGPR, JSScope::offsetOfNext()), resultGPR); 3718 activationNotCreated.link(&m_jit); 3719 cellResult(resultGPR, node); 3720 break; 3721 } 3722 3723 case SkipScope: { 3724 SpeculateCellOperand scope(this, node->child1()); 3725 GPRTemporary result(this, scope); 3726 m_jit.loadPtr(JITCompiler::Address(scope.gpr(), JSScope::offsetOfNext()), result.gpr()); 3727 cellResult(result.gpr(), node); 3728 break; 3729 } 3730 3731 case GetScopeRegisters: { 3732 SpeculateCellOperand scope(this, node->child1()); 3733 GPRTemporary result(this); 3734 GPRReg scopeGPR = scope.gpr(); 3735 GPRReg resultGPR = result.gpr(); 3736 3737 m_jit.loadPtr(JITCompiler::Address(scopeGPR, JSVariableObject::offsetOfRegisters()), resultGPR); 3738 storageResult(resultGPR, node); 3739 break; 3740 } 3741 case GetScopedVar: { 3742 StorageOperand registers(this, node->child1()); 3743 GPRTemporary result(this); 3744 GPRReg registersGPR = registers.gpr(); 3745 GPRReg resultGPR = result.gpr(); 3746 3747 
m_jit.load64(JITCompiler::Address(registersGPR, node->varNumber() * sizeof(Register)), resultGPR); 3748 jsValueResult(resultGPR, node); 3749 break; 3750 } 3751 case PutScopedVar: { 3752 SpeculateCellOperand scope(this, node->child1()); 3753 StorageOperand registers(this, node->child2()); 3754 JSValueOperand value(this, node->child3()); 3755 GPRTemporary scratchRegister(this); 3756 3757 GPRReg scopeGPR = scope.gpr(); 3758 GPRReg registersGPR = registers.gpr(); 3759 GPRReg valueGPR = value.gpr(); 3760 GPRReg scratchGPR = scratchRegister.gpr(); 3761 3762 m_jit.store64(valueGPR, JITCompiler::Address(registersGPR, node->varNumber() * sizeof(Register))); 3763 writeBarrier(scopeGPR, valueGPR, node->child3(), WriteBarrierForVariableAccess, scratchGPR); 3764 noResult(node); 3765 break; 3766 } 3767 case GetById: { 3768 if (!node->prediction()) { 3769 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0); 3770 break; 3771 } 3772 3773 switch (node->child1().useKind()) { 3774 case CellUse: { 3775 SpeculateCellOperand base(this, node->child1()); 3776 GPRTemporary result(this, base); 3777 3778 GPRReg baseGPR = base.gpr(); 3779 GPRReg resultGPR = result.gpr(); 3780 3781 base.use(); 3782 3783 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber()); 3784 3785 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly); 3786 break; 3787 } 3788 3789 case UntypedUse: { 3790 JSValueOperand base(this, node->child1()); 3791 GPRTemporary result(this, base); 3792 3793 GPRReg baseGPR = base.gpr(); 3794 GPRReg resultGPR = result.gpr(); 3795 3796 base.use(); 3797 3798 JITCompiler::Jump notCell = m_jit.branchTest64(JITCompiler::NonZero, baseGPR, GPRInfo::tagMaskRegister); 3799 3800 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), notCell); 3801 3802 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly); 3803 break; 3804 } 3805 3806 default: 3807 RELEASE_ASSERT_NOT_REACHED(); 3808 break; 3809 } 3810 break; 3811 } 3812 3813 
case GetByIdFlush: { 3814 if (!node->prediction()) { 3815 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0); 3816 break; 3817 } 3818 3819 switch (node->child1().useKind()) { 3820 case CellUse: { 3821 SpeculateCellOperand base(this, node->child1()); 3822 GPRReg baseGPR = base.gpr(); 3823 3824 GPRResult result(this); 3825 3826 GPRReg resultGPR = result.gpr(); 3827 3828 base.use(); 3829 3830 flushRegisters(); 3831 3832 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), JITCompiler::Jump(), DontSpill); 3833 3834 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly); 3835 break; 3836 } 3837 3838 case UntypedUse: { 3839 JSValueOperand base(this, node->child1()); 3840 GPRReg baseGPR = base.gpr(); 3841 3842 GPRResult result(this); 3843 GPRReg resultGPR = result.gpr(); 3844 3845 base.use(); 3846 flushRegisters(); 3847 3848 JITCompiler::Jump notCell = m_jit.branchTest64(JITCompiler::NonZero, baseGPR, GPRInfo::tagMaskRegister); 3849 3850 cachedGetById(node->codeOrigin, baseGPR, resultGPR, node->identifierNumber(), notCell, DontSpill); 3851 3852 jsValueResult(resultGPR, node, UseChildrenCalledExplicitly); 3853 break; 3854 } 3855 3856 default: 3857 RELEASE_ASSERT_NOT_REACHED(); 3858 break; 3859 } 3860 break; 3861 } 3862 3863 case GetArrayLength: 3864 compileGetArrayLength(node); 3865 break; 3866 3867 case CheckFunction: { 3868 SpeculateCellOperand function(this, node->child1()); 3869 speculationCheck(BadFunction, JSValueSource::unboxedCell(function.gpr()), node->child1(), m_jit.branchWeakPtr(JITCompiler::NotEqual, function.gpr(), node->function())); 3870 noResult(node); 3871 break; 3872 } 3873 3874 case CheckExecutable: { 3875 SpeculateCellOperand function(this, node->child1()); 3876 speculationCheck(BadExecutable, JSValueSource::unboxedCell(function.gpr()), node->child1(), m_jit.branchWeakPtr(JITCompiler::NotEqual, JITCompiler::Address(function.gpr(), JSFunction::offsetOfExecutable()), node->executable())); 3877 
noResult(node); 3878 break; 3879 } 3880 3881 case CheckStructure: 3882 case ForwardCheckStructure: { 3883 SpeculateCellOperand base(this, node->child1()); 3884 3885 ASSERT(node->structureSet().size()); 3886 3887 ExitKind exitKind; 3888 if (node->child1()->op() == WeakJSConstant) 3889 exitKind = BadWeakConstantCache; 3890 else 3891 exitKind = BadCache; 3892 3893 if (node->structureSet().size() == 1) { 3894 speculationCheck( 3895 exitKind, JSValueSource::unboxedCell(base.gpr()), 0, 3896 m_jit.branchWeakPtr( 3897 JITCompiler::NotEqual, 3898 JITCompiler::Address(base.gpr(), JSCell::structureOffset()), 3899 node->structureSet()[0])); 3900 } else { 3901 GPRTemporary structure(this); 3902 3903 m_jit.loadPtr(JITCompiler::Address(base.gpr(), JSCell::structureOffset()), structure.gpr()); 3904 3905 JITCompiler::JumpList done; 3906 3907 for (size_t i = 0; i < node->structureSet().size() - 1; ++i) 3908 done.append(m_jit.branchWeakPtr(JITCompiler::Equal, structure.gpr(), node->structureSet()[i])); 3909 3910 speculationCheck( 3911 exitKind, JSValueSource::unboxedCell(base.gpr()), 0, 3912 m_jit.branchWeakPtr( 3913 JITCompiler::NotEqual, structure.gpr(), node->structureSet().last())); 3914 3915 done.link(&m_jit); 3916 } 3917 3918 noResult(node); 3919 break; 3920 } 3921 3922 case StructureTransitionWatchpoint: 3923 case ForwardStructureTransitionWatchpoint: { 3924 // There is a fascinating question here of what to do about array profiling. 3925 // We *could* try to tell the OSR exit about where the base of the access is. 3926 // The DFG will have kept it alive, though it may not be in a register, and 3927 // we shouldn't really load it since that could be a waste. For now though, 3928 // we'll just rely on the fact that when a watchpoint fires then that's 3929 // quite a hint already. 3930 3931 m_jit.addWeakReference(node->structure()); 3932 node->structure()->addTransitionWatchpoint( 3933 speculationWatchpoint( 3934 node->child1()->op() == WeakJSConstant ? 
BadWeakConstantCache : BadCache)); 3935 3936#if !ASSERT_DISABLED 3937 SpeculateCellOperand op1(this, node->child1()); 3938 JITCompiler::Jump isOK = m_jit.branchPtr(JITCompiler::Equal, JITCompiler::Address(op1.gpr(), JSCell::structureOffset()), TrustedImmPtr(node->structure())); 3939 m_jit.breakpoint(); 3940 isOK.link(&m_jit); 3941#else 3942 speculateCell(node->child1()); 3943#endif 3944 3945 noResult(node); 3946 break; 3947 } 3948 3949 case PhantomPutStructure: { 3950 ASSERT(isKnownCell(node->child1().node())); 3951 3952 ASSERT(node->structureTransitionData().previousStructure->transitionWatchpointSetHasBeenInvalidated()); 3953 m_jit.addWeakReferenceTransition( 3954 node->codeOrigin.codeOriginOwner(), 3955 node->structureTransitionData().previousStructure, 3956 node->structureTransitionData().newStructure); 3957 noResult(node); 3958 break; 3959 } 3960 3961 case PutStructure: { 3962 ASSERT(node->structureTransitionData().previousStructure->transitionWatchpointSetHasBeenInvalidated()); 3963 3964 SpeculateCellOperand base(this, node->child1()); 3965 GPRReg baseGPR = base.gpr(); 3966 3967 m_jit.addWeakReferenceTransition( 3968 node->codeOrigin.codeOriginOwner(), 3969 node->structureTransitionData().previousStructure, 3970 node->structureTransitionData().newStructure); 3971 3972#if ENABLE(WRITE_BARRIER_PROFILING) 3973 // Must always emit this write barrier as the structure transition itself requires it 3974 writeBarrier(baseGPR, node->structureTransitionData().newStructure, WriteBarrierForGenericAccess); 3975#endif 3976 3977 m_jit.storePtr(MacroAssembler::TrustedImmPtr(node->structureTransitionData().newStructure), MacroAssembler::Address(baseGPR, JSCell::structureOffset())); 3978 3979 noResult(node); 3980 break; 3981 } 3982 3983 case AllocatePropertyStorage: 3984 compileAllocatePropertyStorage(node); 3985 break; 3986 3987 case ReallocatePropertyStorage: 3988 compileReallocatePropertyStorage(node); 3989 break; 3990 3991 case GetButterfly: { 3992 SpeculateCellOperand 
base(this, node->child1()); 3993 GPRTemporary result(this, base); 3994 3995 GPRReg baseGPR = base.gpr(); 3996 GPRReg resultGPR = result.gpr(); 3997 3998 m_jit.loadPtr(JITCompiler::Address(baseGPR, JSObject::butterflyOffset()), resultGPR); 3999 4000 storageResult(resultGPR, node); 4001 break; 4002 } 4003 4004 case GetIndexedPropertyStorage: { 4005 compileGetIndexedPropertyStorage(node); 4006 break; 4007 } 4008 4009 case GetByOffset: { 4010 StorageOperand storage(this, node->child1()); 4011 GPRTemporary result(this, storage); 4012 4013 GPRReg storageGPR = storage.gpr(); 4014 GPRReg resultGPR = result.gpr(); 4015 4016 StorageAccessData& storageAccessData = m_jit.graph().m_storageAccessData[node->storageAccessDataIndex()]; 4017 4018 m_jit.load64(JITCompiler::Address(storageGPR, storageAccessData.offset * sizeof(EncodedJSValue)), resultGPR); 4019 4020 jsValueResult(resultGPR, node); 4021 break; 4022 } 4023 4024 case PutByOffset: { 4025#if ENABLE(WRITE_BARRIER_PROFILING) 4026 SpeculateCellOperand base(this, node->child2()); 4027#endif 4028 StorageOperand storage(this, node->child1()); 4029 JSValueOperand value(this, node->child3()); 4030 4031 GPRReg storageGPR = storage.gpr(); 4032 GPRReg valueGPR = value.gpr(); 4033 4034#if ENABLE(WRITE_BARRIER_PROFILING) 4035 writeBarrier(base.gpr(), value.gpr(), node->child3(), WriteBarrierForPropertyAccess); 4036#endif 4037 4038 StorageAccessData& storageAccessData = m_jit.graph().m_storageAccessData[node->storageAccessDataIndex()]; 4039 4040 m_jit.store64(valueGPR, JITCompiler::Address(storageGPR, storageAccessData.offset * sizeof(EncodedJSValue))); 4041 4042 noResult(node); 4043 break; 4044 } 4045 4046 case PutById: { 4047 SpeculateCellOperand base(this, node->child1()); 4048 JSValueOperand value(this, node->child2()); 4049 GPRTemporary scratch(this); 4050 4051 GPRReg baseGPR = base.gpr(); 4052 GPRReg valueGPR = value.gpr(); 4053 GPRReg scratchGPR = scratch.gpr(); 4054 4055 base.use(); 4056 value.use(); 4057 4058 
cachedPutById(node->codeOrigin, baseGPR, valueGPR, node->child2(), scratchGPR, node->identifierNumber(), NotDirect); 4059 4060 noResult(node, UseChildrenCalledExplicitly); 4061 break; 4062 } 4063 4064 case PutByIdDirect: { 4065 SpeculateCellOperand base(this, node->child1()); 4066 JSValueOperand value(this, node->child2()); 4067 GPRTemporary scratch(this); 4068 4069 GPRReg baseGPR = base.gpr(); 4070 GPRReg valueGPR = value.gpr(); 4071 GPRReg scratchGPR = scratch.gpr(); 4072 4073 base.use(); 4074 value.use(); 4075 4076 cachedPutById(node->codeOrigin, baseGPR, valueGPR, node->child2(), scratchGPR, node->identifierNumber(), Direct); 4077 4078 noResult(node, UseChildrenCalledExplicitly); 4079 break; 4080 } 4081 4082 case GetGlobalVar: { 4083 GPRTemporary result(this); 4084 4085 m_jit.load64(node->registerPointer(), result.gpr()); 4086 4087 jsValueResult(result.gpr(), node); 4088 break; 4089 } 4090 4091 case PutGlobalVar: { 4092 JSValueOperand value(this, node->child1()); 4093 4094 if (Heap::isWriteBarrierEnabled()) { 4095 GPRTemporary scratch(this); 4096 GPRReg scratchReg = scratch.gpr(); 4097 4098 writeBarrier(m_jit.globalObjectFor(node->codeOrigin), value.gpr(), node->child1(), WriteBarrierForVariableAccess, scratchReg); 4099 } 4100 4101 m_jit.store64(value.gpr(), node->registerPointer()); 4102 4103 noResult(node); 4104 break; 4105 } 4106 4107 case PutGlobalVarCheck: { 4108 JSValueOperand value(this, node->child1()); 4109 4110 WatchpointSet* watchpointSet = 4111 m_jit.globalObjectFor(node->codeOrigin)->symbolTable()->get( 4112 identifier(node->identifierNumberForCheck())->impl()).watchpointSet(); 4113 addSlowPathGenerator( 4114 slowPathCall( 4115 m_jit.branchTest8( 4116 JITCompiler::NonZero, 4117 JITCompiler::AbsoluteAddress(watchpointSet->addressOfIsWatched())), 4118 this, operationNotifyGlobalVarWrite, NoResult, watchpointSet)); 4119 4120 if (Heap::isWriteBarrierEnabled()) { 4121 GPRTemporary scratch(this); 4122 GPRReg scratchReg = scratch.gpr(); 4123 4124 
writeBarrier(m_jit.globalObjectFor(node->codeOrigin), value.gpr(), node->child1(), WriteBarrierForVariableAccess, scratchReg); 4125 } 4126 4127 m_jit.store64(value.gpr(), node->registerPointer()); 4128 4129 noResult(node); 4130 break; 4131 } 4132 4133 case GlobalVarWatchpoint: { 4134 m_jit.globalObjectFor(node->codeOrigin)->symbolTable()->get( 4135 identifier(node->identifierNumberForCheck())->impl()).addWatchpoint( 4136 speculationWatchpoint()); 4137 4138#if DFG_ENABLE(JIT_ASSERT) 4139 GPRTemporary scratch(this); 4140 GPRReg scratchGPR = scratch.gpr(); 4141 m_jit.load64(node->registerPointer(), scratchGPR); 4142 JITCompiler::Jump ok = m_jit.branch64( 4143 JITCompiler::Equal, scratchGPR, 4144 TrustedImm64(JSValue::encode(node->registerPointer()->get()))); 4145 m_jit.breakpoint(); 4146 ok.link(&m_jit); 4147#endif 4148 4149 noResult(node); 4150 break; 4151 } 4152 4153 case CheckHasInstance: { 4154 SpeculateCellOperand base(this, node->child1()); 4155 GPRTemporary structure(this); 4156 4157 // Speculate that base 'ImplementsDefaultHasInstance'. 
4158 m_jit.loadPtr(MacroAssembler::Address(base.gpr(), JSCell::structureOffset()), structure.gpr()); 4159 speculationCheck(Uncountable, JSValueRegs(), 0, m_jit.branchTest8(MacroAssembler::Zero, MacroAssembler::Address(structure.gpr(), Structure::typeInfoFlagsOffset()), MacroAssembler::TrustedImm32(ImplementsDefaultHasInstance))); 4160 4161 noResult(node); 4162 break; 4163 } 4164 4165 case InstanceOf: { 4166 compileInstanceOf(node); 4167 break; 4168 } 4169 4170 case IsUndefined: { 4171 JSValueOperand value(this, node->child1()); 4172 GPRTemporary result(this); 4173 GPRTemporary localGlobalObject(this); 4174 GPRTemporary remoteGlobalObject(this); 4175 4176 JITCompiler::Jump isCell = m_jit.branchTest64(JITCompiler::Zero, value.gpr(), GPRInfo::tagMaskRegister); 4177 4178 m_jit.compare64(JITCompiler::Equal, value.gpr(), TrustedImm32(ValueUndefined), result.gpr()); 4179 JITCompiler::Jump done = m_jit.jump(); 4180 4181 isCell.link(&m_jit); 4182 JITCompiler::Jump notMasqueradesAsUndefined; 4183 if (m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->isStillValid()) { 4184 m_jit.graph().globalObjectFor(node->codeOrigin)->masqueradesAsUndefinedWatchpoint()->add(speculationWatchpoint()); 4185 m_jit.move(TrustedImm32(0), result.gpr()); 4186 notMasqueradesAsUndefined = m_jit.jump(); 4187 } else { 4188 m_jit.loadPtr(JITCompiler::Address(value.gpr(), JSCell::structureOffset()), result.gpr()); 4189 JITCompiler::Jump isMasqueradesAsUndefined = m_jit.branchTest8(JITCompiler::NonZero, JITCompiler::Address(result.gpr(), Structure::typeInfoFlagsOffset()), TrustedImm32(MasqueradesAsUndefined)); 4190 m_jit.move(TrustedImm32(0), result.gpr()); 4191 notMasqueradesAsUndefined = m_jit.jump(); 4192 4193 isMasqueradesAsUndefined.link(&m_jit); 4194 GPRReg localGlobalObjectGPR = localGlobalObject.gpr(); 4195 GPRReg remoteGlobalObjectGPR = remoteGlobalObject.gpr(); 4196 m_jit.move(TrustedImmPtr(m_jit.globalObjectFor(node->codeOrigin)), localGlobalObjectGPR); 4197 
m_jit.loadPtr(JITCompiler::Address(result.gpr(), Structure::globalObjectOffset()), remoteGlobalObjectGPR); 4198 m_jit.comparePtr(JITCompiler::Equal, localGlobalObjectGPR, remoteGlobalObjectGPR, result.gpr()); 4199 } 4200 4201 notMasqueradesAsUndefined.link(&m_jit); 4202 done.link(&m_jit); 4203 m_jit.or32(TrustedImm32(ValueFalse), result.gpr()); 4204 jsValueResult(result.gpr(), node, DataFormatJSBoolean); 4205 break; 4206 } 4207 4208 case IsBoolean: { 4209 JSValueOperand value(this, node->child1()); 4210 GPRTemporary result(this, value); 4211 4212 m_jit.move(value.gpr(), result.gpr()); 4213 m_jit.xor64(JITCompiler::TrustedImm32(ValueFalse), result.gpr()); 4214 m_jit.test64(JITCompiler::Zero, result.gpr(), JITCompiler::TrustedImm32(static_cast<int32_t>(~1)), result.gpr()); 4215 m_jit.or32(TrustedImm32(ValueFalse), result.gpr()); 4216 jsValueResult(result.gpr(), node, DataFormatJSBoolean); 4217 break; 4218 } 4219 4220 case IsNumber: { 4221 JSValueOperand value(this, node->child1()); 4222 GPRTemporary result(this, value); 4223 4224 m_jit.test64(JITCompiler::NonZero, value.gpr(), GPRInfo::tagTypeNumberRegister, result.gpr()); 4225 m_jit.or32(TrustedImm32(ValueFalse), result.gpr()); 4226 jsValueResult(result.gpr(), node, DataFormatJSBoolean); 4227 break; 4228 } 4229 4230 case IsString: { 4231 JSValueOperand value(this, node->child1()); 4232 GPRTemporary result(this, value); 4233 4234 JITCompiler::Jump isNotCell = m_jit.branchTest64(JITCompiler::NonZero, value.gpr(), GPRInfo::tagMaskRegister); 4235 4236 m_jit.loadPtr(JITCompiler::Address(value.gpr(), JSCell::structureOffset()), result.gpr()); 4237 m_jit.compare8(JITCompiler::Equal, JITCompiler::Address(result.gpr(), Structure::typeInfoTypeOffset()), TrustedImm32(StringType), result.gpr()); 4238 m_jit.or32(TrustedImm32(ValueFalse), result.gpr()); 4239 JITCompiler::Jump done = m_jit.jump(); 4240 4241 isNotCell.link(&m_jit); 4242 m_jit.move(TrustedImm32(ValueFalse), result.gpr()); 4243 4244 done.link(&m_jit); 4245 
jsValueResult(result.gpr(), node, DataFormatJSBoolean); 4246 break; 4247 } 4248 4249 case IsObject: { 4250 JSValueOperand value(this, node->child1()); 4251 GPRReg valueGPR = value.gpr(); 4252 GPRResult result(this); 4253 GPRReg resultGPR = result.gpr(); 4254 flushRegisters(); 4255 callOperation(operationIsObject, resultGPR, valueGPR); 4256 m_jit.or32(TrustedImm32(ValueFalse), resultGPR); 4257 jsValueResult(result.gpr(), node, DataFormatJSBoolean); 4258 break; 4259 } 4260 4261 case IsFunction: { 4262 JSValueOperand value(this, node->child1()); 4263 GPRReg valueGPR = value.gpr(); 4264 GPRResult result(this); 4265 GPRReg resultGPR = result.gpr(); 4266 flushRegisters(); 4267 callOperation(operationIsFunction, resultGPR, valueGPR); 4268 m_jit.or32(TrustedImm32(ValueFalse), resultGPR); 4269 jsValueResult(result.gpr(), node, DataFormatJSBoolean); 4270 break; 4271 } 4272 4273 case TypeOf: { 4274 JSValueOperand value(this, node->child1(), ManualOperandSpeculation); 4275 GPRReg valueGPR = value.gpr(); 4276 GPRTemporary temp(this); 4277 GPRReg tempGPR = temp.gpr(); 4278 GPRResult result(this); 4279 GPRReg resultGPR = result.gpr(); 4280 JITCompiler::JumpList doneJumps; 4281 4282 flushRegisters(); 4283 4284 ASSERT(node->child1().useKind() == UntypedUse || node->child1().useKind() == CellUse || node->child1().useKind() == StringUse); 4285 4286 JITCompiler::Jump isNotCell = m_jit.branchTest64(JITCompiler::NonZero, valueGPR, GPRInfo::tagMaskRegister); 4287 if (node->child1().useKind() != UntypedUse) 4288 DFG_TYPE_CHECK(JSValueSource(valueGPR), node->child1(), SpecCell, isNotCell); 4289 4290 if (!node->child1()->shouldSpeculateObject() || node->child1().useKind() == StringUse) { 4291 m_jit.loadPtr(JITCompiler::Address(valueGPR, JSCell::structureOffset()), tempGPR); 4292 JITCompiler::Jump notString = m_jit.branch8(JITCompiler::NotEqual, JITCompiler::Address(tempGPR, Structure::typeInfoTypeOffset()), TrustedImm32(StringType)); 4293 if (node->child1().useKind() == StringUse) 4294 
DFG_TYPE_CHECK(JSValueSource(valueGPR), node->child1(), SpecString, notString); 4295 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.stringString()), resultGPR); 4296 doneJumps.append(m_jit.jump()); 4297 if (node->child1().useKind() != StringUse) { 4298 notString.link(&m_jit); 4299 callOperation(operationTypeOf, resultGPR, valueGPR); 4300 doneJumps.append(m_jit.jump()); 4301 } 4302 } else { 4303 callOperation(operationTypeOf, resultGPR, valueGPR); 4304 doneJumps.append(m_jit.jump()); 4305 } 4306 4307 if (node->child1().useKind() == UntypedUse) { 4308 isNotCell.link(&m_jit); 4309 JITCompiler::Jump notNumber = m_jit.branchTest64(JITCompiler::Zero, valueGPR, GPRInfo::tagTypeNumberRegister); 4310 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.numberString()), resultGPR); 4311 doneJumps.append(m_jit.jump()); 4312 notNumber.link(&m_jit); 4313 4314 JITCompiler::Jump notUndefined = m_jit.branch64(JITCompiler::NotEqual, valueGPR, JITCompiler::TrustedImm64(ValueUndefined)); 4315 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.undefinedString()), resultGPR); 4316 doneJumps.append(m_jit.jump()); 4317 notUndefined.link(&m_jit); 4318 4319 JITCompiler::Jump notNull = m_jit.branch64(JITCompiler::NotEqual, valueGPR, JITCompiler::TrustedImm64(ValueNull)); 4320 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.objectString()), resultGPR); 4321 doneJumps.append(m_jit.jump()); 4322 notNull.link(&m_jit); 4323 4324 // Only boolean left 4325 m_jit.move(TrustedImmPtr(m_jit.vm()->smallStrings.booleanString()), resultGPR); 4326 } 4327 doneJumps.link(&m_jit); 4328 cellResult(resultGPR, node); 4329 break; 4330 } 4331 4332 case Flush: 4333 case Phi: 4334 break; 4335 4336 case Breakpoint: 4337#if ENABLE(DEBUG_WITH_BREAKPOINT) 4338 m_jit.breakpoint(); 4339#else 4340 RELEASE_ASSERT_NOT_REACHED(); 4341#endif 4342 break; 4343 4344 case Call: 4345 case Construct: 4346 emitCall(node); 4347 break; 4348 4349 case Resolve: { 4350 flushRegisters(); 4351 GPRResult result(this); 4352 
ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()]; 4353 callOperation(operationResolve, result.gpr(), identifier(data.identifierNumber), data.resolveOperations); 4354 jsValueResult(result.gpr(), node); 4355 break; 4356 } 4357 4358 case ResolveBase: { 4359 flushRegisters(); 4360 GPRResult result(this); 4361 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()]; 4362 callOperation(operationResolveBase, result.gpr(), identifier(data.identifierNumber), data.resolveOperations, data.putToBaseOperation); 4363 jsValueResult(result.gpr(), node); 4364 break; 4365 } 4366 4367 case ResolveBaseStrictPut: { 4368 flushRegisters(); 4369 GPRResult result(this); 4370 ResolveOperationData& data = m_jit.graph().m_resolveOperationsData[node->resolveOperationsDataIndex()]; 4371 callOperation(operationResolveBaseStrictPut, result.gpr(), identifier(data.identifierNumber), data.resolveOperations, data.putToBaseOperation); 4372 jsValueResult(result.gpr(), node); 4373 break; 4374 } 4375 4376 case ResolveGlobal: { 4377 GPRTemporary globalObject(this); 4378 GPRTemporary resolveInfo(this); 4379 GPRTemporary result(this); 4380 4381 GPRReg globalObjectGPR = globalObject.gpr(); 4382 GPRReg resolveInfoGPR = resolveInfo.gpr(); 4383 GPRReg resultGPR = result.gpr(); 4384 4385 ResolveGlobalData& data = m_jit.graph().m_resolveGlobalData[node->resolveGlobalDataIndex()]; 4386 ResolveOperation* resolveOperationAddress = &(data.resolveOperations->data()[data.resolvePropertyIndex]); 4387 4388 // Check Structure of global object 4389 m_jit.move(JITCompiler::TrustedImmPtr(m_jit.globalObjectFor(node->codeOrigin)), globalObjectGPR); 4390 m_jit.move(JITCompiler::TrustedImmPtr(resolveOperationAddress), resolveInfoGPR); 4391 m_jit.loadPtr(JITCompiler::Address(resolveInfoGPR, OBJECT_OFFSETOF(ResolveOperation, m_structure)), resultGPR); 4392 JITCompiler::Jump structuresDontMatch = 
m_jit.branchPtr(JITCompiler::NotEqual, resultGPR, JITCompiler::Address(globalObjectGPR, JSCell::structureOffset())); 4393 4394 // Fast case 4395 m_jit.load32(JITCompiler::Address(resolveInfoGPR, OBJECT_OFFSETOF(ResolveOperation, m_offset)), resolveInfoGPR); 4396#if DFG_ENABLE(JIT_ASSERT) 4397 JITCompiler::Jump isOutOfLine = m_jit.branch32(JITCompiler::GreaterThanOrEqual, resolveInfoGPR, TrustedImm32(firstOutOfLineOffset)); 4398 m_jit.breakpoint(); 4399 isOutOfLine.link(&m_jit); 4400#endif 4401 m_jit.neg32(resolveInfoGPR); 4402 m_jit.signExtend32ToPtr(resolveInfoGPR, resolveInfoGPR); 4403 m_jit.loadPtr(JITCompiler::Address(globalObjectGPR, JSObject::butterflyOffset()), resultGPR); 4404 m_jit.load64(JITCompiler::BaseIndex(resultGPR, resolveInfoGPR, JITCompiler::TimesEight, (firstOutOfLineOffset - 2) * static_cast<ptrdiff_t>(sizeof(JSValue))), resultGPR); 4405 4406 addSlowPathGenerator( 4407 slowPathCall( 4408 structuresDontMatch, this, operationResolveGlobal, 4409 resultGPR, resolveInfoGPR, globalObjectGPR, 4410 &m_jit.codeBlock()->identifier(data.identifierNumber))); 4411 4412 jsValueResult(resultGPR, node); 4413 break; 4414 } 4415 4416 case CreateActivation: { 4417 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame); 4418 4419 JSValueOperand value(this, node->child1()); 4420 GPRTemporary result(this, value); 4421 4422 GPRReg valueGPR = value.gpr(); 4423 GPRReg resultGPR = result.gpr(); 4424 4425 m_jit.move(valueGPR, resultGPR); 4426 4427 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR); 4428 4429 addSlowPathGenerator( 4430 slowPathCall(notCreated, this, operationCreateActivation, resultGPR)); 4431 4432 cellResult(resultGPR, node); 4433 break; 4434 } 4435 4436 case CreateArguments: { 4437 JSValueOperand value(this, node->child1()); 4438 GPRTemporary result(this, value); 4439 4440 GPRReg valueGPR = value.gpr(); 4441 GPRReg resultGPR = result.gpr(); 4442 4443 m_jit.move(valueGPR, resultGPR); 4444 4445 JITCompiler::Jump notCreated = 
m_jit.branchTest64(JITCompiler::Zero, resultGPR); 4446 4447 if (node->codeOrigin.inlineCallFrame) { 4448 addSlowPathGenerator( 4449 slowPathCall( 4450 notCreated, this, operationCreateInlinedArguments, resultGPR, 4451 node->codeOrigin.inlineCallFrame)); 4452 } else { 4453 addSlowPathGenerator( 4454 slowPathCall(notCreated, this, operationCreateArguments, resultGPR)); 4455 } 4456 4457 cellResult(resultGPR, node); 4458 break; 4459 } 4460 4461 case TearOffActivation: { 4462 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame); 4463 4464 JSValueOperand activationValue(this, node->child1()); 4465 GPRTemporary scratch(this); 4466 GPRReg activationValueGPR = activationValue.gpr(); 4467 GPRReg scratchGPR = scratch.gpr(); 4468 4469 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, activationValueGPR); 4470 4471 SharedSymbolTable* symbolTable = m_jit.symbolTableFor(node->codeOrigin); 4472 int registersOffset = JSActivation::registersOffset(symbolTable); 4473 4474 int captureEnd = symbolTable->captureEnd(); 4475 for (int i = symbolTable->captureStart(); i < captureEnd; ++i) { 4476 m_jit.load64( 4477 JITCompiler::Address( 4478 GPRInfo::callFrameRegister, i * sizeof(Register)), scratchGPR); 4479 m_jit.store64( 4480 scratchGPR, JITCompiler::Address( 4481 activationValueGPR, registersOffset + i * sizeof(Register))); 4482 } 4483 m_jit.addPtr(TrustedImm32(registersOffset), activationValueGPR, scratchGPR); 4484 m_jit.storePtr(scratchGPR, JITCompiler::Address(activationValueGPR, JSActivation::offsetOfRegisters())); 4485 4486 notCreated.link(&m_jit); 4487 noResult(node); 4488 break; 4489 } 4490 4491 case TearOffArguments: { 4492 JSValueOperand unmodifiedArgumentsValue(this, node->child1()); 4493 JSValueOperand activationValue(this, node->child2()); 4494 GPRReg unmodifiedArgumentsValueGPR = unmodifiedArgumentsValue.gpr(); 4495 GPRReg activationValueGPR = activationValue.gpr(); 4496 4497 JITCompiler::Jump created = m_jit.branchTest64(JITCompiler::NonZero, 
unmodifiedArgumentsValueGPR); 4498 4499 if (node->codeOrigin.inlineCallFrame) { 4500 addSlowPathGenerator( 4501 slowPathCall( 4502 created, this, operationTearOffInlinedArguments, NoResult, 4503 unmodifiedArgumentsValueGPR, activationValueGPR, node->codeOrigin.inlineCallFrame)); 4504 } else { 4505 addSlowPathGenerator( 4506 slowPathCall( 4507 created, this, operationTearOffArguments, NoResult, unmodifiedArgumentsValueGPR, activationValueGPR)); 4508 } 4509 4510 noResult(node); 4511 break; 4512 } 4513 4514 case GetMyArgumentsLength: { 4515 GPRTemporary result(this); 4516 GPRReg resultGPR = result.gpr(); 4517 4518 if (!isEmptySpeculation( 4519 m_state.variables().operand( 4520 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)) { 4521 speculationCheck( 4522 ArgumentsEscaped, JSValueRegs(), 0, 4523 m_jit.branchTest64( 4524 JITCompiler::NonZero, 4525 JITCompiler::addressFor( 4526 m_jit.argumentsRegisterFor(node->codeOrigin)))); 4527 } 4528 4529 RELEASE_ASSERT(!node->codeOrigin.inlineCallFrame); 4530 m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), resultGPR); 4531 m_jit.sub32(TrustedImm32(1), resultGPR); 4532 integerResult(resultGPR, node); 4533 break; 4534 } 4535 4536 case GetMyArgumentsLengthSafe: { 4537 GPRTemporary result(this); 4538 GPRReg resultGPR = result.gpr(); 4539 4540 JITCompiler::Jump created = m_jit.branchTest64( 4541 JITCompiler::NonZero, 4542 JITCompiler::addressFor( 4543 m_jit.argumentsRegisterFor(node->codeOrigin))); 4544 4545 if (node->codeOrigin.inlineCallFrame) { 4546 m_jit.move( 4547 Imm64(JSValue::encode(jsNumber(node->codeOrigin.inlineCallFrame->arguments.size() - 1))), 4548 resultGPR); 4549 } else { 4550 m_jit.load32(JITCompiler::payloadFor(JSStack::ArgumentCount), resultGPR); 4551 m_jit.sub32(TrustedImm32(1), resultGPR); 4552 m_jit.or64(GPRInfo::tagTypeNumberRegister, resultGPR); 4553 } 4554 4555 // FIXME: the slow path generator should perform a forward speculation that the 4556 // result is an integer. 
For now we postpone the speculation by having this return 4557 // a JSValue. 4558 4559 addSlowPathGenerator( 4560 slowPathCall( 4561 created, this, operationGetArgumentsLength, resultGPR, 4562 m_jit.argumentsRegisterFor(node->codeOrigin))); 4563 4564 jsValueResult(resultGPR, node); 4565 break; 4566 } 4567 4568 case GetMyArgumentByVal: { 4569 SpeculateStrictInt32Operand index(this, node->child1()); 4570 GPRTemporary result(this); 4571 GPRReg indexGPR = index.gpr(); 4572 GPRReg resultGPR = result.gpr(); 4573 4574 if (!isEmptySpeculation( 4575 m_state.variables().operand( 4576 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)) { 4577 speculationCheck( 4578 ArgumentsEscaped, JSValueRegs(), 0, 4579 m_jit.branchTest64( 4580 JITCompiler::NonZero, 4581 JITCompiler::addressFor( 4582 m_jit.argumentsRegisterFor(node->codeOrigin)))); 4583 } 4584 4585 m_jit.add32(TrustedImm32(1), indexGPR, resultGPR); 4586 if (node->codeOrigin.inlineCallFrame) { 4587 speculationCheck( 4588 Uncountable, JSValueRegs(), 0, 4589 m_jit.branch32( 4590 JITCompiler::AboveOrEqual, 4591 resultGPR, 4592 Imm32(node->codeOrigin.inlineCallFrame->arguments.size()))); 4593 } else { 4594 speculationCheck( 4595 Uncountable, JSValueRegs(), 0, 4596 m_jit.branch32( 4597 JITCompiler::AboveOrEqual, 4598 resultGPR, 4599 JITCompiler::payloadFor(JSStack::ArgumentCount))); 4600 } 4601 4602 JITCompiler::JumpList slowArgument; 4603 JITCompiler::JumpList slowArgumentOutOfBounds; 4604 if (const SlowArgument* slowArguments = m_jit.symbolTableFor(node->codeOrigin)->slowArguments()) { 4605 slowArgumentOutOfBounds.append( 4606 m_jit.branch32( 4607 JITCompiler::AboveOrEqual, indexGPR, 4608 Imm32(m_jit.symbolTableFor(node->codeOrigin)->parameterCount()))); 4609 4610 COMPILE_ASSERT(sizeof(SlowArgument) == 8, SlowArgument_size_is_eight_bytes); 4611 m_jit.move(ImmPtr(slowArguments), resultGPR); 4612 m_jit.load32( 4613 JITCompiler::BaseIndex( 4614 resultGPR, indexGPR, JITCompiler::TimesEight, 4615 
OBJECT_OFFSETOF(SlowArgument, index)), 4616 resultGPR); 4617 m_jit.signExtend32ToPtr(resultGPR, resultGPR); 4618 m_jit.load64( 4619 JITCompiler::BaseIndex( 4620 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfLocals(node->codeOrigin)), 4621 resultGPR); 4622 slowArgument.append(m_jit.jump()); 4623 } 4624 slowArgumentOutOfBounds.link(&m_jit); 4625 4626 m_jit.neg32(resultGPR); 4627 m_jit.signExtend32ToPtr(resultGPR, resultGPR); 4628 4629 m_jit.load64( 4630 JITCompiler::BaseIndex( 4631 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfArgumentsIncludingThis(node->codeOrigin)), 4632 resultGPR); 4633 4634 slowArgument.link(&m_jit); 4635 jsValueResult(resultGPR, node); 4636 break; 4637 } 4638 4639 case GetMyArgumentByValSafe: { 4640 SpeculateStrictInt32Operand index(this, node->child1()); 4641 GPRTemporary result(this); 4642 GPRReg indexGPR = index.gpr(); 4643 GPRReg resultGPR = result.gpr(); 4644 4645 JITCompiler::JumpList slowPath; 4646 slowPath.append( 4647 m_jit.branchTest64( 4648 JITCompiler::NonZero, 4649 JITCompiler::addressFor( 4650 m_jit.argumentsRegisterFor(node->codeOrigin)))); 4651 4652 m_jit.add32(TrustedImm32(1), indexGPR, resultGPR); 4653 if (node->codeOrigin.inlineCallFrame) { 4654 slowPath.append( 4655 m_jit.branch32( 4656 JITCompiler::AboveOrEqual, 4657 resultGPR, 4658 Imm32(node->codeOrigin.inlineCallFrame->arguments.size()))); 4659 } else { 4660 slowPath.append( 4661 m_jit.branch32( 4662 JITCompiler::AboveOrEqual, 4663 resultGPR, 4664 JITCompiler::payloadFor(JSStack::ArgumentCount))); 4665 } 4666 4667 JITCompiler::JumpList slowArgument; 4668 JITCompiler::JumpList slowArgumentOutOfBounds; 4669 if (const SlowArgument* slowArguments = m_jit.symbolTableFor(node->codeOrigin)->slowArguments()) { 4670 slowArgumentOutOfBounds.append( 4671 m_jit.branch32( 4672 JITCompiler::AboveOrEqual, indexGPR, 4673 Imm32(m_jit.symbolTableFor(node->codeOrigin)->parameterCount()))); 4674 4675 
COMPILE_ASSERT(sizeof(SlowArgument) == 8, SlowArgument_size_is_eight_bytes); 4676 m_jit.move(ImmPtr(slowArguments), resultGPR); 4677 m_jit.load32( 4678 JITCompiler::BaseIndex( 4679 resultGPR, indexGPR, JITCompiler::TimesEight, 4680 OBJECT_OFFSETOF(SlowArgument, index)), 4681 resultGPR); 4682 m_jit.signExtend32ToPtr(resultGPR, resultGPR); 4683 m_jit.load64( 4684 JITCompiler::BaseIndex( 4685 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfLocals(node->codeOrigin)), 4686 resultGPR); 4687 slowArgument.append(m_jit.jump()); 4688 } 4689 slowArgumentOutOfBounds.link(&m_jit); 4690 4691 m_jit.neg32(resultGPR); 4692 m_jit.signExtend32ToPtr(resultGPR, resultGPR); 4693 4694 m_jit.load64( 4695 JITCompiler::BaseIndex( 4696 GPRInfo::callFrameRegister, resultGPR, JITCompiler::TimesEight, m_jit.offsetOfArgumentsIncludingThis(node->codeOrigin)), 4697 resultGPR); 4698 4699 if (node->codeOrigin.inlineCallFrame) { 4700 addSlowPathGenerator( 4701 slowPathCall( 4702 slowPath, this, operationGetInlinedArgumentByVal, resultGPR, 4703 m_jit.argumentsRegisterFor(node->codeOrigin), 4704 node->codeOrigin.inlineCallFrame, 4705 indexGPR)); 4706 } else { 4707 addSlowPathGenerator( 4708 slowPathCall( 4709 slowPath, this, operationGetArgumentByVal, resultGPR, 4710 m_jit.argumentsRegisterFor(node->codeOrigin), 4711 indexGPR)); 4712 } 4713 4714 slowArgument.link(&m_jit); 4715 jsValueResult(resultGPR, node); 4716 break; 4717 } 4718 4719 case CheckArgumentsNotCreated: { 4720 ASSERT(!isEmptySpeculation( 4721 m_state.variables().operand( 4722 m_jit.graph().argumentsRegisterFor(node->codeOrigin)).m_type)); 4723 speculationCheck( 4724 ArgumentsEscaped, JSValueRegs(), 0, 4725 m_jit.branchTest64( 4726 JITCompiler::NonZero, 4727 JITCompiler::addressFor( 4728 m_jit.argumentsRegisterFor(node->codeOrigin)))); 4729 noResult(node); 4730 break; 4731 } 4732 4733 case NewFunctionNoCheck: 4734 compileNewFunctionNoCheck(node); 4735 break; 4736 4737 case NewFunction: { 4738 JSValueOperand 
value(this, node->child1()); 4739 GPRTemporary result(this, value); 4740 4741 GPRReg valueGPR = value.gpr(); 4742 GPRReg resultGPR = result.gpr(); 4743 4744 m_jit.move(valueGPR, resultGPR); 4745 4746 JITCompiler::Jump notCreated = m_jit.branchTest64(JITCompiler::Zero, resultGPR); 4747 4748 addSlowPathGenerator( 4749 slowPathCall( 4750 notCreated, this, operationNewFunction, 4751 resultGPR, m_jit.codeBlock()->functionDecl(node->functionDeclIndex()))); 4752 4753 jsValueResult(resultGPR, node); 4754 break; 4755 } 4756 4757 case NewFunctionExpression: 4758 compileNewFunctionExpression(node); 4759 break; 4760 4761 case CountExecution: 4762 m_jit.add64(TrustedImm32(1), MacroAssembler::AbsoluteAddress(node->executionCounter()->address())); 4763 break; 4764 4765 case GarbageValue: 4766 // We should never get to the point of code emission for a GarbageValue 4767 CRASH(); 4768 break; 4769 4770 case ForceOSRExit: { 4771 terminateSpeculativeExecution(InadequateCoverage, JSValueRegs(), 0); 4772 break; 4773 } 4774 4775 case CheckWatchdogTimer: 4776 speculationCheck( 4777 WatchdogTimerFired, JSValueRegs(), 0, 4778 m_jit.branchTest8( 4779 JITCompiler::NonZero, 4780 JITCompiler::AbsoluteAddress(m_jit.vm()->watchdog.timerDidFireAddress()))); 4781 break; 4782 4783 case Phantom: 4784 DFG_NODE_DO_TO_CHILDREN(m_jit.graph(), node, speculate); 4785 noResult(node); 4786 break; 4787 4788 case PhantomLocal: 4789 // This is a no-op. 4790 noResult(node); 4791 break; 4792 4793 case Unreachable: 4794 RELEASE_ASSERT_NOT_REACHED(); 4795 break; 4796 4797 case Nop: 4798 RELEASE_ASSERT_NOT_REACHED(); 4799 break; 4800 4801 case LastNodeType: 4802 RELEASE_ASSERT_NOT_REACHED(); 4803 break; 4804 } 4805 4806#if ENABLE(DFG_REGISTER_ALLOCATION_VALIDATION) 4807 m_jit.clearRegisterAllocationOffsets(); 4808#endif 4809 4810 if (!m_compileOkay) 4811 return; 4812 4813 if (node->hasResult() && node->mustGenerate()) 4814 use(node); 4815} 4816 4817#endif 4818 4819} } // namespace JSC::DFG 4820 4821#endif 4822