1/*
2 * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 *    notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 *    notice, this list of conditions and the following disclaimer in the
11 *    documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
27#include "DFGByteCodeParser.h"
28
29#if ENABLE(DFG_JIT)
30
31#include "ArrayConstructor.h"
32#include "CallLinkStatus.h"
33#include "CodeBlock.h"
34#include "CodeBlockWithJITType.h"
35#include "DFGArrayMode.h"
36#include "DFGCapabilities.h"
37#include "GetByIdStatus.h"
38#include "Operations.h"
39#include "PreciseJumpTargets.h"
40#include "PutByIdStatus.h"
41#include "ResolveGlobalStatus.h"
42#include "StringConstructor.h"
43#include <wtf/CommaPrinter.h>
44#include <wtf/HashMap.h>
45#include <wtf/MathExtras.h>
46
47namespace JSC { namespace DFG {
48
49class ConstantBufferKey {
50public:
51    ConstantBufferKey()
52        : m_codeBlock(0)
53        , m_index(0)
54    {
55    }
56
57    ConstantBufferKey(WTF::HashTableDeletedValueType)
58        : m_codeBlock(0)
59        , m_index(1)
60    {
61    }
62
63    ConstantBufferKey(CodeBlock* codeBlock, unsigned index)
64        : m_codeBlock(codeBlock)
65        , m_index(index)
66    {
67    }
68
69    bool operator==(const ConstantBufferKey& other) const
70    {
71        return m_codeBlock == other.m_codeBlock
72            && m_index == other.m_index;
73    }
74
75    unsigned hash() const
76    {
77        return WTF::PtrHash<CodeBlock*>::hash(m_codeBlock) ^ m_index;
78    }
79
80    bool isHashTableDeletedValue() const
81    {
82        return !m_codeBlock && m_index;
83    }
84
85    CodeBlock* codeBlock() const { return m_codeBlock; }
86    unsigned index() const { return m_index; }
87
88private:
89    CodeBlock* m_codeBlock;
90    unsigned m_index;
91};
92
93struct ConstantBufferKeyHash {
94    static unsigned hash(const ConstantBufferKey& key) { return key.hash(); }
95    static bool equal(const ConstantBufferKey& a, const ConstantBufferKey& b)
96    {
97        return a == b;
98    }
99
100    static const bool safeToCompareToEmptyOrDeleted = true;
101};
102
103} } // namespace JSC::DFG
104
105namespace WTF {
106
107template<typename T> struct DefaultHash;
108template<> struct DefaultHash<JSC::DFG::ConstantBufferKey> {
109    typedef JSC::DFG::ConstantBufferKeyHash Hash;
110};
111
112template<typename T> struct HashTraits;
113template<> struct HashTraits<JSC::DFG::ConstantBufferKey> : SimpleClassHashTraits<JSC::DFG::ConstantBufferKey> { };
114
115} // namespace WTF
116
117namespace JSC { namespace DFG {
118
119// === ByteCodeParser ===
120//
121// This class is used to compile the dataflow graph from a CodeBlock.
122class ByteCodeParser {
123public:
124    ByteCodeParser(Graph& graph)
125        : m_vm(&graph.m_vm)
126        , m_codeBlock(graph.m_codeBlock)
127        , m_profiledBlock(graph.m_profiledBlock)
128        , m_graph(graph)
129        , m_currentBlock(0)
130        , m_currentIndex(0)
131        , m_currentProfilingIndex(0)
132        , m_constantUndefined(UINT_MAX)
133        , m_constantNull(UINT_MAX)
134        , m_constantNaN(UINT_MAX)
135        , m_constant1(UINT_MAX)
136        , m_constants(m_codeBlock->numberOfConstantRegisters())
137        , m_numArguments(m_codeBlock->numParameters())
138        , m_numLocals(m_codeBlock->m_numCalleeRegisters)
139        , m_preservedVars(m_codeBlock->m_numVars)
140        , m_parameterSlots(0)
141        , m_numPassedVarArgs(0)
142        , m_inlineStackTop(0)
143        , m_haveBuiltOperandMaps(false)
144        , m_emptyJSValueIndex(UINT_MAX)
145        , m_currentInstruction(0)
146    {
147        ASSERT(m_profiledBlock);
148
149        for (int i = 0; i < m_codeBlock->m_numVars; ++i)
150            m_preservedVars.set(i);
151    }
152
153    // Parse a full CodeBlock of bytecode.
154    bool parse();
155
156private:
157    struct InlineStackEntry;
158
159    // Just parse from m_currentIndex to the end of the current CodeBlock.
160    void parseCodeBlock();
161
162    // Helper for min and max.
163    bool handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis);
164
165    // Handle calls. This resolves issues surrounding inlining and intrinsics.
166    void handleCall(Interpreter*, Instruction* currentInstruction, NodeType op, CodeSpecializationKind);
167    void emitFunctionChecks(const CallLinkStatus&, Node* callTarget, int registerOffset, CodeSpecializationKind);
168    void emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind);
169    // Handle inlining. Return true if it succeeded, false if we need to plant a call.
170    bool handleInlining(bool usesResult, Node* callTargetNode, int resultOperand, const CallLinkStatus&, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind);
171    // Handle setting the result of an intrinsic.
172    void setIntrinsicResult(bool usesResult, int resultOperand, Node*);
173    // Handle intrinsic functions. Return true if it succeeded, false if we need to plant a call.
174    bool handleIntrinsic(bool usesResult, int resultOperand, Intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction);
175    bool handleConstantInternalFunction(bool usesResult, int resultOperand, InternalFunction*, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind);
176    Node* handleGetByOffset(SpeculatedType, Node* base, unsigned identifierNumber, PropertyOffset);
177    void handleGetByOffset(
178        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
179        PropertyOffset);
180    void handleGetById(
181        int destinationOperand, SpeculatedType, Node* base, unsigned identifierNumber,
182        const GetByIdStatus&);
183
184    Node* getScope(bool skipTop, unsigned skipCount);
185
186    // Convert a set of ResolveOperations into graph nodes
187    bool parseResolveOperations(SpeculatedType, unsigned identifierNumber, ResolveOperations*, PutToBaseOperation*, Node** base, Node** value);
188
189    // Prepare to parse a block.
190    void prepareToParseBlock();
191    // Parse a single basic block of bytecode instructions.
192    bool parseBlock(unsigned limit);
193    // Link block successors.
194    void linkBlock(BasicBlock*, Vector<BlockIndex>& possibleTargets);
195    void linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets);
196
197    VariableAccessData* newVariableAccessData(int operand, bool isCaptured)
198    {
199        ASSERT(operand < FirstConstantRegisterIndex);
200
201        m_graph.m_variableAccessData.append(VariableAccessData(static_cast<VirtualRegister>(operand), isCaptured));
202        return &m_graph.m_variableAccessData.last();
203    }
204
205    // Get/Set the operands/result of a bytecode instruction.
206    Node* getDirect(int operand)
207    {
208        // Is this a constant?
209        if (operand >= FirstConstantRegisterIndex) {
210            unsigned constant = operand - FirstConstantRegisterIndex;
211            ASSERT(constant < m_constants.size());
212            return getJSConstant(constant);
213        }
214
215        ASSERT(operand != JSStack::Callee);
216
217        // Is this an argument?
218        if (operandIsArgument(operand))
219            return getArgument(operand);
220
221        // Must be a local.
222        return getLocal((unsigned)operand);
223    }
224    Node* get(int operand)
225    {
226        if (operand == JSStack::Callee) {
227            if (inlineCallFrame() && inlineCallFrame()->callee)
228                return cellConstant(inlineCallFrame()->callee.get());
229
230            return getCallee();
231        }
232
233        return getDirect(m_inlineStackTop->remapOperand(operand));
234    }
235    enum SetMode { NormalSet, SetOnEntry };
236    void setDirect(int operand, Node* value, SetMode setMode = NormalSet)
237    {
238        // Is this an argument?
239        if (operandIsArgument(operand)) {
240            setArgument(operand, value, setMode);
241            return;
242        }
243
244        // Must be a local.
245        setLocal((unsigned)operand, value, setMode);
246    }
247    void set(int operand, Node* value, SetMode setMode = NormalSet)
248    {
249        setDirect(m_inlineStackTop->remapOperand(operand), value, setMode);
250    }
251
252    void setPair(int operand1, Node* value1, int operand2, Node* value2)
253    {
254        // First emit dead SetLocals for the benefit of OSR.
255        set(operand1, value1);
256        set(operand2, value2);
257
258        // Now emit the real SetLocals.
259        set(operand1, value1);
260        set(operand2, value2);
261    }
262
263    Node* injectLazyOperandSpeculation(Node* node)
264    {
265        ASSERT(node->op() == GetLocal);
266        ASSERT(node->codeOrigin.bytecodeIndex == m_currentIndex);
267        SpeculatedType prediction =
268            m_inlineStackTop->m_lazyOperands.prediction(
269                LazyOperandValueProfileKey(m_currentIndex, node->local()));
270#if DFG_ENABLE(DEBUG_VERBOSE)
271        dataLog("Lazy operand [@", node->index(), ", bc#", m_currentIndex, ", r", node->local(), "] prediction: ", SpeculationDump(prediction), "\n");
272#endif
273        node->variableAccessData()->predict(prediction);
274        return node;
275    }
276
277    // Used in implementing get/set, above, where the operand is a local variable.
278    Node* getLocal(unsigned operand)
279    {
280        Node* node = m_currentBlock->variablesAtTail.local(operand);
281        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
282
283        // This has two goals: 1) link together variable access datas, and 2)
284        // try to avoid creating redundant GetLocals. (1) is required for
285        // correctness - no other phase will ensure that block-local variable
286        // access data unification is done correctly. (2) is purely opportunistic
287        // and is meant as an compile-time optimization only.
288
289        VariableAccessData* variable;
290
291        if (node) {
292            variable = node->variableAccessData();
293            variable->mergeIsCaptured(isCaptured);
294
295            if (!isCaptured) {
296                switch (node->op()) {
297                case GetLocal:
298                    return node;
299                case SetLocal:
300                    return node->child1().node();
301                default:
302                    break;
303                }
304            }
305        } else {
306            m_preservedVars.set(operand);
307            variable = newVariableAccessData(operand, isCaptured);
308        }
309
310        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
311        m_currentBlock->variablesAtTail.local(operand) = node;
312        return node;
313    }
314    void setLocal(unsigned operand, Node* value, SetMode setMode = NormalSet)
315    {
316        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
317
318        if (setMode == NormalSet) {
319            ArgumentPosition* argumentPosition = findArgumentPositionForLocal(operand);
320            if (isCaptured || argumentPosition)
321                flushDirect(operand, argumentPosition);
322        }
323
324        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
325        variableAccessData->mergeStructureCheckHoistingFailed(
326            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
327        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
328        m_currentBlock->variablesAtTail.local(operand) = node;
329    }
330
331    // Used in implementing get/set, above, where the operand is an argument.
332    Node* getArgument(unsigned operand)
333    {
334        unsigned argument = operandToArgument(operand);
335        ASSERT(argument < m_numArguments);
336
337        Node* node = m_currentBlock->variablesAtTail.argument(argument);
338        bool isCaptured = m_codeBlock->isCaptured(operand);
339
340        VariableAccessData* variable;
341
342        if (node) {
343            variable = node->variableAccessData();
344            variable->mergeIsCaptured(isCaptured);
345
346            switch (node->op()) {
347            case GetLocal:
348                return node;
349            case SetLocal:
350                return node->child1().node();
351            default:
352                break;
353            }
354        } else
355            variable = newVariableAccessData(operand, isCaptured);
356
357        node = injectLazyOperandSpeculation(addToGraph(GetLocal, OpInfo(variable)));
358        m_currentBlock->variablesAtTail.argument(argument) = node;
359        return node;
360    }
361    void setArgument(int operand, Node* value, SetMode setMode = NormalSet)
362    {
363        unsigned argument = operandToArgument(operand);
364        ASSERT(argument < m_numArguments);
365
366        bool isCaptured = m_codeBlock->isCaptured(operand);
367
368        VariableAccessData* variableAccessData = newVariableAccessData(operand, isCaptured);
369
370        // Always flush arguments, except for 'this'. If 'this' is created by us,
371        // then make sure that it's never unboxed.
372        if (argument) {
373            if (setMode == NormalSet)
374                flushDirect(operand);
375        } else if (m_codeBlock->specializationKind() == CodeForConstruct)
376            variableAccessData->mergeShouldNeverUnbox(true);
377
378        variableAccessData->mergeStructureCheckHoistingFailed(
379            m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
380        Node* node = addToGraph(SetLocal, OpInfo(variableAccessData), value);
381        m_currentBlock->variablesAtTail.argument(argument) = node;
382    }
383
384    ArgumentPosition* findArgumentPositionForArgument(int argument)
385    {
386        InlineStackEntry* stack = m_inlineStackTop;
387        while (stack->m_inlineCallFrame)
388            stack = stack->m_caller;
389        return stack->m_argumentPositions[argument];
390    }
391
392    ArgumentPosition* findArgumentPositionForLocal(int operand)
393    {
394        for (InlineStackEntry* stack = m_inlineStackTop; ; stack = stack->m_caller) {
395            InlineCallFrame* inlineCallFrame = stack->m_inlineCallFrame;
396            if (!inlineCallFrame)
397                break;
398            if (operand >= static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize))
399                continue;
400            if (operand == inlineCallFrame->stackOffset + CallFrame::thisArgumentOffset())
401                continue;
402            if (operand < static_cast<int>(inlineCallFrame->stackOffset - JSStack::CallFrameHeaderSize - inlineCallFrame->arguments.size()))
403                continue;
404            int argument = operandToArgument(operand - inlineCallFrame->stackOffset);
405            return stack->m_argumentPositions[argument];
406        }
407        return 0;
408    }
409
410    ArgumentPosition* findArgumentPosition(int operand)
411    {
412        if (operandIsArgument(operand))
413            return findArgumentPositionForArgument(operandToArgument(operand));
414        return findArgumentPositionForLocal(operand);
415    }
416
417    void flush(int operand)
418    {
419        flushDirect(m_inlineStackTop->remapOperand(operand));
420    }
421
422    void flushDirect(int operand)
423    {
424        flushDirect(operand, findArgumentPosition(operand));
425    }
426
427    void flushDirect(int operand, ArgumentPosition* argumentPosition)
428    {
429        bool isCaptured = m_codeBlock->isCaptured(operand, inlineCallFrame());
430
431        ASSERT(operand < FirstConstantRegisterIndex);
432
433        if (!operandIsArgument(operand))
434            m_preservedVars.set(operand);
435
436        Node* node = m_currentBlock->variablesAtTail.operand(operand);
437
438        VariableAccessData* variable;
439
440        if (node) {
441            variable = node->variableAccessData();
442            variable->mergeIsCaptured(isCaptured);
443        } else
444            variable = newVariableAccessData(operand, isCaptured);
445
446        node = addToGraph(Flush, OpInfo(variable));
447        m_currentBlock->variablesAtTail.operand(operand) = node;
448        if (argumentPosition)
449            argumentPosition->addVariable(variable);
450    }
451
452    void flush(InlineStackEntry* inlineStackEntry)
453    {
454        int numArguments;
455        if (InlineCallFrame* inlineCallFrame = inlineStackEntry->m_inlineCallFrame)
456            numArguments = inlineCallFrame->arguments.size();
457        else
458            numArguments = inlineStackEntry->m_codeBlock->numParameters();
459        for (unsigned argument = numArguments; argument-- > 1;)
460            flushDirect(inlineStackEntry->remapOperand(argumentToOperand(argument)));
461        for (int local = 0; local < inlineStackEntry->m_codeBlock->m_numVars; ++local) {
462            if (!inlineStackEntry->m_codeBlock->isCaptured(local))
463                continue;
464            flushDirect(inlineStackEntry->remapOperand(local));
465        }
466    }
467
468    void flushAllArgumentsAndCapturedVariablesInInlineStack()
469    {
470        for (InlineStackEntry* inlineStackEntry = m_inlineStackTop; inlineStackEntry; inlineStackEntry = inlineStackEntry->m_caller)
471            flush(inlineStackEntry);
472    }
473
474    void flushArgumentsAndCapturedVariables()
475    {
476        flush(m_inlineStackTop);
477    }
478
479    // Get an operand, and perform a ToInt32/ToNumber conversion on it.
480    Node* getToInt32(int operand)
481    {
482        return toInt32(get(operand));
483    }
484
485    // Perform an ES5 ToInt32 operation - returns a node of type NodeResultInt32.
486    Node* toInt32(Node* node)
487    {
488        if (node->hasInt32Result())
489            return node;
490
491        if (node->op() == UInt32ToNumber)
492            return node->child1().node();
493
494        // Check for numeric constants boxed as JSValues.
495        if (canFold(node)) {
496            JSValue v = valueOfJSConstant(node);
497            if (v.isInt32())
498                return getJSConstant(node->constantNumber());
499            if (v.isNumber())
500                return getJSConstantForValue(JSValue(JSC::toInt32(v.asNumber())));
501        }
502
503        return addToGraph(ValueToInt32, node);
504    }
505
506    // NOTE: Only use this to construct constants that arise from non-speculative
507    // constant folding. I.e. creating constants using this if we had constant
508    // field inference would be a bad idea, since the bytecode parser's folding
509    // doesn't handle liveness preservation.
510    Node* getJSConstantForValue(JSValue constantValue)
511    {
512        unsigned constantIndex = m_codeBlock->addOrFindConstant(constantValue);
513        if (constantIndex >= m_constants.size())
514            m_constants.append(ConstantRecord());
515
516        ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
517
518        return getJSConstant(constantIndex);
519    }
520
521    Node* getJSConstant(unsigned constant)
522    {
523        Node* node = m_constants[constant].asJSValue;
524        if (node)
525            return node;
526
527        Node* result = addToGraph(JSConstant, OpInfo(constant));
528        m_constants[constant].asJSValue = result;
529        return result;
530    }
531
532    Node* getCallee()
533    {
534        return addToGraph(GetCallee);
535    }
536
537    // Helper functions to get/set the this value.
538    Node* getThis()
539    {
540        return get(m_inlineStackTop->m_codeBlock->thisRegister());
541    }
542    void setThis(Node* value)
543    {
544        set(m_inlineStackTop->m_codeBlock->thisRegister(), value);
545    }
546
547    // Convenience methods for checking nodes for constants.
548    bool isJSConstant(Node* node)
549    {
550        return node->op() == JSConstant;
551    }
552    bool isInt32Constant(Node* node)
553    {
554        return isJSConstant(node) && valueOfJSConstant(node).isInt32();
555    }
556    // Convenience methods for getting constant values.
557    JSValue valueOfJSConstant(Node* node)
558    {
559        ASSERT(isJSConstant(node));
560        return m_codeBlock->getConstant(FirstConstantRegisterIndex + node->constantNumber());
561    }
562    int32_t valueOfInt32Constant(Node* node)
563    {
564        ASSERT(isInt32Constant(node));
565        return valueOfJSConstant(node).asInt32();
566    }
567
568    // This method returns a JSConstant with the value 'undefined'.
569    Node* constantUndefined()
570    {
571        // Has m_constantUndefined been set up yet?
572        if (m_constantUndefined == UINT_MAX) {
573            // Search the constant pool for undefined, if we find it, we can just reuse this!
574            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
575            for (m_constantUndefined = 0; m_constantUndefined < numberOfConstants; ++m_constantUndefined) {
576                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined);
577                if (testMe.isUndefined())
578                    return getJSConstant(m_constantUndefined);
579            }
580
581            // Add undefined to the CodeBlock's constants, and add a corresponding slot in m_constants.
582            ASSERT(m_constants.size() == numberOfConstants);
583            m_codeBlock->addConstant(jsUndefined());
584            m_constants.append(ConstantRecord());
585            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
586        }
587
588        // m_constantUndefined must refer to an entry in the CodeBlock's constant pool that has the value 'undefined'.
589        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantUndefined).isUndefined());
590        return getJSConstant(m_constantUndefined);
591    }
592
593    // This method returns a JSConstant with the value 'null'.
594    Node* constantNull()
595    {
596        // Has m_constantNull been set up yet?
597        if (m_constantNull == UINT_MAX) {
598            // Search the constant pool for null, if we find it, we can just reuse this!
599            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
600            for (m_constantNull = 0; m_constantNull < numberOfConstants; ++m_constantNull) {
601                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull);
602                if (testMe.isNull())
603                    return getJSConstant(m_constantNull);
604            }
605
606            // Add null to the CodeBlock's constants, and add a corresponding slot in m_constants.
607            ASSERT(m_constants.size() == numberOfConstants);
608            m_codeBlock->addConstant(jsNull());
609            m_constants.append(ConstantRecord());
610            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
611        }
612
613        // m_constantNull must refer to an entry in the CodeBlock's constant pool that has the value 'null'.
614        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNull).isNull());
615        return getJSConstant(m_constantNull);
616    }
617
618    // This method returns a DoubleConstant with the value 1.
619    Node* one()
620    {
621        // Has m_constant1 been set up yet?
622        if (m_constant1 == UINT_MAX) {
623            // Search the constant pool for the value 1, if we find it, we can just reuse this!
624            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
625            for (m_constant1 = 0; m_constant1 < numberOfConstants; ++m_constant1) {
626                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1);
627                if (testMe.isInt32() && testMe.asInt32() == 1)
628                    return getJSConstant(m_constant1);
629            }
630
631            // Add the value 1 to the CodeBlock's constants, and add a corresponding slot in m_constants.
632            ASSERT(m_constants.size() == numberOfConstants);
633            m_codeBlock->addConstant(jsNumber(1));
634            m_constants.append(ConstantRecord());
635            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
636        }
637
638        // m_constant1 must refer to an entry in the CodeBlock's constant pool that has the integer value 1.
639        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).isInt32());
640        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constant1).asInt32() == 1);
641        return getJSConstant(m_constant1);
642    }
643
644    // This method returns a DoubleConstant with the value NaN.
645    Node* constantNaN()
646    {
647        JSValue nan = jsNaN();
648
649        // Has m_constantNaN been set up yet?
650        if (m_constantNaN == UINT_MAX) {
651            // Search the constant pool for the value NaN, if we find it, we can just reuse this!
652            unsigned numberOfConstants = m_codeBlock->numberOfConstantRegisters();
653            for (m_constantNaN = 0; m_constantNaN < numberOfConstants; ++m_constantNaN) {
654                JSValue testMe = m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN);
655                if (JSValue::encode(testMe) == JSValue::encode(nan))
656                    return getJSConstant(m_constantNaN);
657            }
658
659            // Add the value nan to the CodeBlock's constants, and add a corresponding slot in m_constants.
660            ASSERT(m_constants.size() == numberOfConstants);
661            m_codeBlock->addConstant(nan);
662            m_constants.append(ConstantRecord());
663            ASSERT(m_constants.size() == m_codeBlock->numberOfConstantRegisters());
664        }
665
666        // m_constantNaN must refer to an entry in the CodeBlock's constant pool that has the value nan.
667        ASSERT(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).isDouble());
668        ASSERT(std::isnan(m_codeBlock->getConstant(FirstConstantRegisterIndex + m_constantNaN).asDouble()));
669        return getJSConstant(m_constantNaN);
670    }
671
672    Node* cellConstant(JSCell* cell)
673    {
674        HashMap<JSCell*, Node*>::AddResult result = m_cellConstantNodes.add(cell, 0);
675        if (result.isNewEntry)
676            result.iterator->value = addToGraph(WeakJSConstant, OpInfo(cell));
677
678        return result.iterator->value;
679    }
680
681    InlineCallFrame* inlineCallFrame()
682    {
683        return m_inlineStackTop->m_inlineCallFrame;
684    }
685
686    CodeOrigin currentCodeOrigin()
687    {
688        return CodeOrigin(m_currentIndex, inlineCallFrame(), m_currentProfilingIndex - m_currentIndex);
689    }
690
691    bool canFold(Node* node)
692    {
693        return node->isStronglyProvedConstantIn(inlineCallFrame());
694    }
695
696    // Our codegen for constant strict equality performs a bitwise comparison,
697    // so we can only select values that have a consistent bitwise identity.
698    bool isConstantForCompareStrictEq(Node* node)
699    {
700        if (!node->isConstant())
701            return false;
702        JSValue value = valueOfJSConstant(node);
703        return value.isBoolean() || value.isUndefinedOrNull();
704    }
705
706    Node* addToGraph(NodeType op, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
707    {
708        Node* result = m_graph.addNode(
709            SpecNone, op, currentCodeOrigin(), Edge(child1), Edge(child2), Edge(child3));
710        ASSERT(op != Phi);
711        m_currentBlock->append(result);
712        return result;
713    }
714    Node* addToGraph(NodeType op, Edge child1, Edge child2 = Edge(), Edge child3 = Edge())
715    {
716        Node* result = m_graph.addNode(
717            SpecNone, op, currentCodeOrigin(), child1, child2, child3);
718        ASSERT(op != Phi);
719        m_currentBlock->append(result);
720        return result;
721    }
722    Node* addToGraph(NodeType op, OpInfo info, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
723    {
724        Node* result = m_graph.addNode(
725            SpecNone, op, currentCodeOrigin(), info, Edge(child1), Edge(child2), Edge(child3));
726        ASSERT(op != Phi);
727        m_currentBlock->append(result);
728        return result;
729    }
730    Node* addToGraph(NodeType op, OpInfo info1, OpInfo info2, Node* child1 = 0, Node* child2 = 0, Node* child3 = 0)
731    {
732        Node* result = m_graph.addNode(
733            SpecNone, op, currentCodeOrigin(), info1, info2,
734            Edge(child1), Edge(child2), Edge(child3));
735        ASSERT(op != Phi);
736        m_currentBlock->append(result);
737        return result;
738    }
739
740    Node* addToGraph(Node::VarArgTag, NodeType op, OpInfo info1, OpInfo info2)
741    {
742        Node* result = m_graph.addNode(
743            SpecNone, Node::VarArg, op, currentCodeOrigin(), info1, info2,
744            m_graph.m_varArgChildren.size() - m_numPassedVarArgs, m_numPassedVarArgs);
745        ASSERT(op != Phi);
746        m_currentBlock->append(result);
747
748        m_numPassedVarArgs = 0;
749
750        return result;
751    }
752
753    void addVarArgChild(Node* child)
754    {
755        m_graph.m_varArgChildren.append(Edge(child));
756        m_numPassedVarArgs++;
757    }
758
759    Node* addCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op)
760    {
761        Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);
762
763        SpeculatedType prediction = SpecNone;
764        if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
765            m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call);
766            prediction = getPrediction();
767        }
768
769        addVarArgChild(get(currentInstruction[1].u.operand));
770        int argCount = currentInstruction[2].u.operand;
771        if (JSStack::CallFrameHeaderSize + (unsigned)argCount > m_parameterSlots)
772            m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
773
774        int registerOffset = currentInstruction[3].u.operand;
775        int dummyThisArgument = op == Call ? 0 : 1;
776        for (int i = 0 + dummyThisArgument; i < argCount; ++i)
777            addVarArgChild(get(registerOffset + argumentToOperand(i)));
778
779        Node* call = addToGraph(Node::VarArg, op, OpInfo(0), OpInfo(prediction));
780        if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
781            set(putInstruction[1].u.operand, call);
782        return call;
783    }
784
785    Node* addStructureTransitionCheck(JSCell* object, Structure* structure)
786    {
787        // Add a weak JS constant for the object regardless, since the code should
788        // be jettisoned if the object ever dies.
789        Node* objectNode = cellConstant(object);
790
791        if (object->structure() == structure && structure->transitionWatchpointSetIsStillValid()) {
792            addToGraph(StructureTransitionWatchpoint, OpInfo(structure), objectNode);
793            return objectNode;
794        }
795
796        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(structure)), objectNode);
797
798        return objectNode;
799    }
800
801    Node* addStructureTransitionCheck(JSCell* object)
802    {
803        return addStructureTransitionCheck(object, object->structure());
804    }
805
806    SpeculatedType getPredictionWithoutOSRExit(unsigned bytecodeIndex)
807    {
808        return m_inlineStackTop->m_profiledBlock->valueProfilePredictionForBytecodeOffset(bytecodeIndex);
809    }
810
811    SpeculatedType getPrediction(unsigned bytecodeIndex)
812    {
813        SpeculatedType prediction = getPredictionWithoutOSRExit(bytecodeIndex);
814
815        if (prediction == SpecNone) {
816            // We have no information about what values this node generates. Give up
817            // on executing this code, since we're likely to do more damage than good.
818            addToGraph(ForceOSRExit);
819        }
820
821        return prediction;
822    }
823
824    SpeculatedType getPredictionWithoutOSRExit()
825    {
826        return getPredictionWithoutOSRExit(m_currentProfilingIndex);
827    }
828
829    SpeculatedType getPrediction()
830    {
831        return getPrediction(m_currentProfilingIndex);
832    }
833
834    ArrayMode getArrayMode(ArrayProfile* profile, Array::Action action)
835    {
836        profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
837        return ArrayMode::fromObserved(profile, action, false);
838    }
839
840    ArrayMode getArrayMode(ArrayProfile* profile)
841    {
842        return getArrayMode(profile, Array::Read);
843    }
844
845    ArrayMode getArrayModeAndEmitChecks(ArrayProfile* profile, Array::Action action, Node* base)
846    {
847        profile->computeUpdatedPrediction(m_inlineStackTop->m_codeBlock);
848
849#if DFG_ENABLE(DEBUG_PROPAGATION_VERBOSE)
850        if (m_inlineStackTop->m_profiledBlock->numberOfRareCaseProfiles())
851            dataLogF("Slow case profile for bc#%u: %u\n", m_currentIndex, m_inlineStackTop->m_profiledBlock->rareCaseProfileForBytecodeOffset(m_currentIndex)->m_counter);
852        dataLogF("Array profile for bc#%u: %p%s%s, %u\n", m_currentIndex, profile->expectedStructure(), profile->structureIsPolymorphic() ? " (polymorphic)" : "", profile->mayInterceptIndexedAccesses() ? " (may intercept)" : "", profile->observedArrayModes());
853#endif
854
855        bool makeSafe =
856            m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
857            || profile->outOfBounds();
858
859        ArrayMode result = ArrayMode::fromObserved(profile, action, makeSafe);
860
861        if (profile->hasDefiniteStructure()
862            && result.benefitsFromStructureCheck()
863            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache))
864            addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(profile->expectedStructure())), base);
865
866        return result;
867    }
868
869    Node* makeSafe(Node* node)
870    {
871        bool likelyToTakeSlowCase;
872        if (!isX86() && node->op() == ArithMod)
873            likelyToTakeSlowCase = false;
874        else
875            likelyToTakeSlowCase = m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex);
876
877        if (!likelyToTakeSlowCase
878            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
879            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
880            return node;
881
882        switch (node->op()) {
883        case UInt32ToNumber:
884        case ArithAdd:
885        case ArithSub:
886        case ArithNegate:
887        case ValueAdd:
888        case ArithMod: // for ArithMod "MayOverflow" means we tried to divide by zero, or we saw double.
889            node->mergeFlags(NodeMayOverflow);
890            break;
891
892        case ArithMul:
893            if (m_inlineStackTop->m_profiledBlock->likelyToTakeDeepestSlowCase(m_currentIndex)
894                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)) {
895#if DFG_ENABLE(DEBUG_VERBOSE)
896                dataLogF("Making ArithMul @%u take deepest slow case.\n", node->index());
897#endif
898                node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
899            } else if (m_inlineStackTop->m_profiledBlock->likelyToTakeSlowCase(m_currentIndex)
900                       || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero)) {
901#if DFG_ENABLE(DEBUG_VERBOSE)
902                dataLogF("Making ArithMul @%u take faster slow case.\n", node->index());
903#endif
904                node->mergeFlags(NodeMayNegZero);
905            }
906            break;
907
908        default:
909            RELEASE_ASSERT_NOT_REACHED();
910            break;
911        }
912
913        return node;
914    }
915
916    Node* makeDivSafe(Node* node)
917    {
918        ASSERT(node->op() == ArithDiv);
919
920        // The main slow case counter for op_div in the old JIT counts only when
921        // the operands are not numbers. We don't care about that since we already
922        // have speculations in place that take care of that separately. We only
923        // care about when the outcome of the division is not an integer, which
924        // is what the special fast case counter tells us.
925
926        if (!m_inlineStackTop->m_profiledBlock->couldTakeSpecialFastCase(m_currentIndex)
927            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow)
928            && !m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero))
929            return node;
930
931#if DFG_ENABLE(DEBUG_VERBOSE)
932        dataLogF("Making %s @%u safe at bc#%u because special fast-case counter is at %u and exit profiles say %d, %d\n", Graph::opName(node->op()), node->index(), m_currentIndex, m_inlineStackTop->m_profiledBlock->specialFastCaseProfileForBytecodeOffset(m_currentIndex)->m_counter, m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow), m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, NegativeZero));
933#endif
934
935        // FIXME: It might be possible to make this more granular. The DFG certainly can
936        // distinguish between negative zero and overflow in its exit profiles.
937        node->mergeFlags(NodeMayOverflow | NodeMayNegZero);
938
939        return node;
940    }
941
942    bool structureChainIsStillValid(bool direct, Structure* previousStructure, StructureChain* chain)
943    {
944        if (direct)
945            return true;
946
947        if (!previousStructure->storedPrototype().isNull() && previousStructure->storedPrototype().asCell()->structure() != chain->head()->get())
948            return false;
949
950        for (WriteBarrier<Structure>* it = chain->head(); *it; ++it) {
951            if (!(*it)->storedPrototype().isNull() && (*it)->storedPrototype().asCell()->structure() != it[1].get())
952                return false;
953        }
954
955        return true;
956    }
957
958    void buildOperandMapsIfNecessary();
959
960    VM* m_vm;
961    CodeBlock* m_codeBlock;
962    CodeBlock* m_profiledBlock;
963    Graph& m_graph;
964
965    // The current block being generated.
966    BasicBlock* m_currentBlock;
967    // The bytecode index of the current instruction being generated.
968    unsigned m_currentIndex;
969    // The bytecode index of the value profile of the current instruction being generated.
970    unsigned m_currentProfilingIndex;
971
972    // We use these values during code generation, and to avoid the need for
973    // special handling we make sure they are available as constants in the
974    // CodeBlock's constant pool. These variables are initialized to
975    // UINT_MAX, and lazily updated to hold an index into the CodeBlock's
976    // constant pool, as necessary.
977    unsigned m_constantUndefined;
978    unsigned m_constantNull;
979    unsigned m_constantNaN;
980    unsigned m_constant1;
981    HashMap<JSCell*, unsigned> m_cellConstants;
982    HashMap<JSCell*, Node*> m_cellConstantNodes;
983
984    // A constant in the constant pool may be represented by more than one
985    // node in the graph, depending on the context in which it is being used.
986    struct ConstantRecord {
987        ConstantRecord()
988            : asInt32(0)
989            , asNumeric(0)
990            , asJSValue(0)
991        {
992        }
993
994        Node* asInt32;
995        Node* asNumeric;
996        Node* asJSValue;
997    };
998
999    // Track the index of the node whose result is the current value for every
1000    // register value in the bytecode - argument, local, and temporary.
1001    Vector<ConstantRecord, 16> m_constants;
1002
1003    // The number of arguments passed to the function.
1004    unsigned m_numArguments;
1005    // The number of locals (vars + temporaries) used in the function.
1006    unsigned m_numLocals;
1007    // The set of registers we need to preserve across BasicBlock boundaries;
1008    // typically equal to the set of vars, but we expand this to cover all
1009    // temporaries that persist across blocks (dues to ?:, &&, ||, etc).
1010    BitVector m_preservedVars;
1011    // The number of slots (in units of sizeof(Register)) that we need to
1012    // preallocate for calls emanating from this frame. This includes the
1013    // size of the CallFrame, only if this is not a leaf function.  (I.e.
1014    // this is 0 if and only if this function is a leaf.)
1015    unsigned m_parameterSlots;
1016    // The number of var args passed to the next var arg node.
1017    unsigned m_numPassedVarArgs;
1018
1019    HashMap<ConstantBufferKey, unsigned> m_constantBufferCache;
1020
1021    struct InlineStackEntry {
1022        ByteCodeParser* m_byteCodeParser;
1023
1024        CodeBlock* m_codeBlock;
1025        CodeBlock* m_profiledBlock;
1026        InlineCallFrame* m_inlineCallFrame;
1027
1028        ScriptExecutable* executable() { return m_codeBlock->ownerExecutable(); }
1029
1030        QueryableExitProfile m_exitProfile;
1031
1032        // Remapping of identifier and constant numbers from the code block being
1033        // inlined (inline callee) to the code block that we're inlining into
1034        // (the machine code block, which is the transitive, though not necessarily
1035        // direct, caller).
1036        Vector<unsigned> m_identifierRemap;
1037        Vector<unsigned> m_constantRemap;
1038        Vector<unsigned> m_constantBufferRemap;
1039
1040        // Blocks introduced by this code block, which need successor linking.
1041        // May include up to one basic block that includes the continuation after
1042        // the callsite in the caller. These must be appended in the order that they
1043        // are created, but their bytecodeBegin values need not be in order as they
1044        // are ignored.
1045        Vector<UnlinkedBlock> m_unlinkedBlocks;
1046
1047        // Potential block linking targets. Must be sorted by bytecodeBegin, and
1048        // cannot have two blocks that have the same bytecodeBegin. For this very
1049        // reason, this is not equivalent to
1050        Vector<BlockIndex> m_blockLinkingTargets;
1051
1052        // If the callsite's basic block was split into two, then this will be
1053        // the head of the callsite block. It needs its successors linked to the
1054        // m_unlinkedBlocks, but not the other way around: there's no way for
1055        // any blocks in m_unlinkedBlocks to jump back into this block.
1056        BlockIndex m_callsiteBlockHead;
1057
1058        // Does the callsite block head need linking? This is typically true
1059        // but will be false for the machine code block's inline stack entry
1060        // (since that one is not inlined) and for cases where an inline callee
1061        // did the linking for us.
1062        bool m_callsiteBlockHeadNeedsLinking;
1063
1064        VirtualRegister m_returnValue;
1065
1066        // Speculations about variable types collected from the profiled code block,
1067        // which are based on OSR exit profiles that past DFG compilatins of this
1068        // code block had gathered.
1069        LazyOperandValueProfileParser m_lazyOperands;
1070
1071        // Did we see any returns? We need to handle the (uncommon but necessary)
1072        // case where a procedure that does not return was inlined.
1073        bool m_didReturn;
1074
1075        // Did we have any early returns?
1076        bool m_didEarlyReturn;
1077
1078        // Pointers to the argument position trackers for this slice of code.
1079        Vector<ArgumentPosition*> m_argumentPositions;
1080
1081        InlineStackEntry* m_caller;
1082
1083        InlineStackEntry(
1084            ByteCodeParser*,
1085            CodeBlock*,
1086            CodeBlock* profiledBlock,
1087            BlockIndex callsiteBlockHead,
1088            JSFunction* callee, // Null if this is a closure call.
1089            VirtualRegister returnValueVR,
1090            VirtualRegister inlineCallFrameStart,
1091            int argumentCountIncludingThis,
1092            CodeSpecializationKind);
1093
1094        ~InlineStackEntry()
1095        {
1096            m_byteCodeParser->m_inlineStackTop = m_caller;
1097        }
1098
1099        int remapOperand(int operand) const
1100        {
1101            if (!m_inlineCallFrame)
1102                return operand;
1103
1104            if (operand >= FirstConstantRegisterIndex) {
1105                int result = m_constantRemap[operand - FirstConstantRegisterIndex];
1106                ASSERT(result >= FirstConstantRegisterIndex);
1107                return result;
1108            }
1109
1110            ASSERT(operand != JSStack::Callee);
1111
1112            return operand + m_inlineCallFrame->stackOffset;
1113        }
1114    };
1115
1116    InlineStackEntry* m_inlineStackTop;
1117
1118    // Have we built operand maps? We initialize them lazily, and only when doing
1119    // inlining.
1120    bool m_haveBuiltOperandMaps;
1121    // Mapping between identifier names and numbers.
1122    IdentifierMap m_identifierMap;
1123    // Mapping between values and constant numbers.
1124    JSValueMap m_jsValueMap;
1125    // Index of the empty value, or UINT_MAX if there is no mapping. This is a horrible
1126    // work-around for the fact that JSValueMap can't handle "empty" values.
1127    unsigned m_emptyJSValueIndex;
1128
1129    Instruction* m_currentInstruction;
1130};
1131
1132#define NEXT_OPCODE(name) \
1133    m_currentIndex += OPCODE_LENGTH(name); \
1134    continue
1135
1136#define LAST_OPCODE(name) \
1137    m_currentIndex += OPCODE_LENGTH(name); \
1138    return shouldContinueParsing
1139
1140
1141void ByteCodeParser::handleCall(Interpreter* interpreter, Instruction* currentInstruction, NodeType op, CodeSpecializationKind kind)
1142{
1143    ASSERT(OPCODE_LENGTH(op_call) == OPCODE_LENGTH(op_construct));
1144
1145    Node* callTarget = get(currentInstruction[1].u.operand);
1146
1147    CallLinkStatus callLinkStatus;
1148
1149    if (m_graph.isConstant(callTarget))
1150        callLinkStatus = CallLinkStatus(m_graph.valueOfJSConstant(callTarget)).setIsProved(true);
1151    else {
1152        callLinkStatus = CallLinkStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex);
1153        callLinkStatus.setHasBadFunctionExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadFunction));
1154        callLinkStatus.setHasBadCacheExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1155        callLinkStatus.setHasBadExecutableExitSite(m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadExecutable));
1156    }
1157
1158#if DFG_ENABLE(DEBUG_VERBOSE)
1159    dataLog("For call at bc#", m_currentIndex, ": ", callLinkStatus, "\n");
1160#endif
1161
1162    if (!callLinkStatus.canOptimize()) {
1163        // Oddly, this conflates calls that haven't executed with calls that behaved sufficiently polymorphically
1164        // that we cannot optimize them.
1165
1166        addCall(interpreter, currentInstruction, op);
1167        return;
1168    }
1169
1170    int argumentCountIncludingThis = currentInstruction[2].u.operand;
1171    int registerOffset = currentInstruction[3].u.operand;
1172
1173    // Do we have a result?
1174    bool usesResult = false;
1175    int resultOperand = 0; // make compiler happy
1176    unsigned nextOffset = m_currentIndex + OPCODE_LENGTH(op_call);
1177    Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call);
1178    SpeculatedType prediction = SpecNone;
1179    if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
1180        resultOperand = putInstruction[1].u.operand;
1181        usesResult = true;
1182        m_currentProfilingIndex = nextOffset;
1183        prediction = getPrediction();
1184        nextOffset += OPCODE_LENGTH(op_call_put_result);
1185    }
1186
1187    if (InternalFunction* function = callLinkStatus.internalFunction()) {
1188        if (handleConstantInternalFunction(usesResult, resultOperand, function, registerOffset, argumentCountIncludingThis, prediction, kind)) {
1189            // This phantoming has to be *after* the code for the intrinsic, to signify that
1190            // the inputs must be kept alive whatever exits the intrinsic may do.
1191            addToGraph(Phantom, callTarget);
1192            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
1193            return;
1194        }
1195
1196        // Can only handle this using the generic call handler.
1197        addCall(interpreter, currentInstruction, op);
1198        return;
1199    }
1200
1201    Intrinsic intrinsic = callLinkStatus.intrinsicFor(kind);
1202    if (intrinsic != NoIntrinsic) {
1203        emitFunctionChecks(callLinkStatus, callTarget, registerOffset, kind);
1204
1205        if (handleIntrinsic(usesResult, resultOperand, intrinsic, registerOffset, argumentCountIncludingThis, prediction)) {
1206            // This phantoming has to be *after* the code for the intrinsic, to signify that
1207            // the inputs must be kept alive whatever exits the intrinsic may do.
1208            addToGraph(Phantom, callTarget);
1209            emitArgumentPhantoms(registerOffset, argumentCountIncludingThis, kind);
1210            if (m_graph.m_compilation)
1211                m_graph.m_compilation->noticeInlinedCall();
1212            return;
1213        }
1214    } else if (handleInlining(usesResult, callTarget, resultOperand, callLinkStatus, registerOffset, argumentCountIncludingThis, nextOffset, kind)) {
1215        if (m_graph.m_compilation)
1216            m_graph.m_compilation->noticeInlinedCall();
1217        return;
1218    }
1219
1220    addCall(interpreter, currentInstruction, op);
1221}
1222
1223void ByteCodeParser::emitFunctionChecks(const CallLinkStatus& callLinkStatus, Node* callTarget, int registerOffset, CodeSpecializationKind kind)
1224{
1225    Node* thisArgument;
1226    if (kind == CodeForCall)
1227        thisArgument = get(registerOffset + argumentToOperand(0));
1228    else
1229        thisArgument = 0;
1230
1231    if (callLinkStatus.isProved()) {
1232        addToGraph(Phantom, callTarget, thisArgument);
1233        return;
1234    }
1235
1236    ASSERT(callLinkStatus.canOptimize());
1237
1238    if (JSFunction* function = callLinkStatus.function())
1239        addToGraph(CheckFunction, OpInfo(function), callTarget, thisArgument);
1240    else {
1241        ASSERT(callLinkStatus.structure());
1242        ASSERT(callLinkStatus.executable());
1243
1244        addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(callLinkStatus.structure())), callTarget);
1245        addToGraph(CheckExecutable, OpInfo(callLinkStatus.executable()), callTarget, thisArgument);
1246    }
1247}
1248
1249void ByteCodeParser::emitArgumentPhantoms(int registerOffset, int argumentCountIncludingThis, CodeSpecializationKind kind)
1250{
1251    for (int i = kind == CodeForCall ? 0 : 1; i < argumentCountIncludingThis; ++i)
1252        addToGraph(Phantom, get(registerOffset + argumentToOperand(i)));
1253}
1254
1255bool ByteCodeParser::handleInlining(bool usesResult, Node* callTargetNode, int resultOperand, const CallLinkStatus& callLinkStatus, int registerOffset, int argumentCountIncludingThis, unsigned nextOffset, CodeSpecializationKind kind)
1256{
1257    // First, the really simple checks: do we have an actual JS function?
1258    if (!callLinkStatus.executable())
1259        return false;
1260    if (callLinkStatus.executable()->isHostFunction())
1261        return false;
1262
1263    FunctionExecutable* executable = jsCast<FunctionExecutable*>(callLinkStatus.executable());
1264
1265    // Does the number of arguments we're passing match the arity of the target? We currently
1266    // inline only if the number of arguments passed is greater than or equal to the number
1267    // arguments expected.
1268    if (static_cast<int>(executable->parameterCount()) + 1 > argumentCountIncludingThis)
1269        return false;
1270
1271    // Have we exceeded inline stack depth, or are we trying to inline a recursive call?
1272    // If either of these are detected, then don't inline.
1273    unsigned depth = 0;
1274    for (InlineStackEntry* entry = m_inlineStackTop; entry; entry = entry->m_caller) {
1275        ++depth;
1276        if (depth >= Options::maximumInliningDepth())
1277            return false; // Depth exceeded.
1278
1279        if (entry->executable() == executable)
1280            return false; // Recursion detected.
1281    }
1282
1283    // Do we have a code block, and does the code block's size match the heuristics/requirements for
1284    // being an inline candidate? We might not have a code block if code was thrown away or if we
1285    // simply hadn't actually made this call yet. We could still theoretically attempt to inline it
1286    // if we had a static proof of what was being called; this might happen for example if you call a
1287    // global function, where watchpointing gives us static information. Overall, it's a rare case
1288    // because we expect that any hot callees would have already been compiled.
1289    CodeBlock* codeBlock = executable->baselineCodeBlockFor(kind);
1290    if (!codeBlock)
1291        return false;
1292    if (!canInlineFunctionFor(codeBlock, kind, callLinkStatus.isClosureCall()))
1293        return false;
1294
1295#if DFG_ENABLE(DEBUG_VERBOSE)
1296    dataLogF("Inlining executable %p.\n", executable);
1297#endif
1298
1299    // Now we know without a doubt that we are committed to inlining. So begin the process
1300    // by checking the callee (if necessary) and making sure that arguments and the callee
1301    // are flushed.
1302    emitFunctionChecks(callLinkStatus, callTargetNode, registerOffset, kind);
1303
1304    // FIXME: Don't flush constants!
1305
1306    int inlineCallFrameStart = m_inlineStackTop->remapOperand(registerOffset) - JSStack::CallFrameHeaderSize;
1307
1308    // Make sure that the area used by the call frame is reserved.
1309    for (int arg = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numVars; arg-- > inlineCallFrameStart;)
1310        m_preservedVars.set(arg);
1311
1312    // Make sure that we have enough locals.
1313    unsigned newNumLocals = inlineCallFrameStart + JSStack::CallFrameHeaderSize + codeBlock->m_numCalleeRegisters;
1314    if (newNumLocals > m_numLocals) {
1315        m_numLocals = newNumLocals;
1316        for (size_t i = 0; i < m_graph.m_blocks.size(); ++i)
1317            m_graph.m_blocks[i]->ensureLocals(newNumLocals);
1318    }
1319
1320    size_t argumentPositionStart = m_graph.m_argumentPositions.size();
1321
1322    InlineStackEntry inlineStackEntry(
1323        this, codeBlock, codeBlock, m_graph.m_blocks.size() - 1,
1324        callLinkStatus.function(), (VirtualRegister)m_inlineStackTop->remapOperand(
1325            usesResult ? resultOperand : InvalidVirtualRegister),
1326        (VirtualRegister)inlineCallFrameStart, argumentCountIncludingThis, kind);
1327
1328    // This is where the actual inlining really happens.
1329    unsigned oldIndex = m_currentIndex;
1330    unsigned oldProfilingIndex = m_currentProfilingIndex;
1331    m_currentIndex = 0;
1332    m_currentProfilingIndex = 0;
1333
1334    addToGraph(InlineStart, OpInfo(argumentPositionStart));
1335    if (callLinkStatus.isClosureCall()) {
1336        addToGraph(SetCallee, callTargetNode);
1337        addToGraph(SetMyScope, addToGraph(GetScope, callTargetNode));
1338    }
1339
1340    parseCodeBlock();
1341
1342    m_currentIndex = oldIndex;
1343    m_currentProfilingIndex = oldProfilingIndex;
1344
1345    // If the inlined code created some new basic blocks, then we have linking to do.
1346    if (inlineStackEntry.m_callsiteBlockHead != m_graph.m_blocks.size() - 1) {
1347
1348        ASSERT(!inlineStackEntry.m_unlinkedBlocks.isEmpty());
1349        if (inlineStackEntry.m_callsiteBlockHeadNeedsLinking)
1350            linkBlock(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead].get(), inlineStackEntry.m_blockLinkingTargets);
1351        else
1352            ASSERT(m_graph.m_blocks[inlineStackEntry.m_callsiteBlockHead]->isLinked);
1353
1354        // It's possible that the callsite block head is not owned by the caller.
1355        if (!inlineStackEntry.m_caller->m_unlinkedBlocks.isEmpty()) {
1356            // It's definitely owned by the caller, because the caller created new blocks.
1357            // Assert that this all adds up.
1358            ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_blockIndex == inlineStackEntry.m_callsiteBlockHead);
1359            ASSERT(inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking);
1360            inlineStackEntry.m_caller->m_unlinkedBlocks.last().m_needsNormalLinking = false;
1361        } else {
1362            // It's definitely not owned by the caller. Tell the caller that he does not
1363            // need to link his callsite block head, because we did it for him.
1364            ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking);
1365            ASSERT(inlineStackEntry.m_caller->m_callsiteBlockHead == inlineStackEntry.m_callsiteBlockHead);
1366            inlineStackEntry.m_caller->m_callsiteBlockHeadNeedsLinking = false;
1367        }
1368
1369        linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
1370    } else
1371        ASSERT(inlineStackEntry.m_unlinkedBlocks.isEmpty());
1372
1373    BasicBlock* lastBlock = m_graph.m_blocks.last().get();
1374    // If there was a return, but no early returns, then we're done. We allow parsing of
1375    // the caller to continue in whatever basic block we're in right now.
1376    if (!inlineStackEntry.m_didEarlyReturn && inlineStackEntry.m_didReturn) {
1377        ASSERT(lastBlock->isEmpty() || !lastBlock->last()->isTerminal());
1378
1379        // If we created new blocks then the last block needs linking, but in the
1380        // caller. It doesn't need to be linked to, but it needs outgoing links.
1381        if (!inlineStackEntry.m_unlinkedBlocks.isEmpty()) {
1382#if DFG_ENABLE(DEBUG_VERBOSE)
1383            dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (inline return case).\n", lastBlock, lastBlock->bytecodeBegin, m_currentIndex);
1384#endif
1385            // For debugging purposes, set the bytecodeBegin. Note that this doesn't matter
1386            // for release builds because this block will never serve as a potential target
1387            // in the linker's binary search.
1388            lastBlock->bytecodeBegin = m_currentIndex;
1389            m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size() - 1));
1390        }
1391
1392        m_currentBlock = m_graph.m_blocks.last().get();
1393
1394#if DFG_ENABLE(DEBUG_VERBOSE)
1395        dataLogF("Done inlining executable %p, continuing code generation at epilogue.\n", executable);
1396#endif
1397        return true;
1398    }
1399
1400    // If we get to this point then all blocks must end in some sort of terminals.
1401    ASSERT(lastBlock->last()->isTerminal());
1402
1403    // Link the early returns to the basic block we're about to create.
1404    for (size_t i = 0; i < inlineStackEntry.m_unlinkedBlocks.size(); ++i) {
1405        if (!inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking)
1406            continue;
1407        BasicBlock* block = m_graph.m_blocks[inlineStackEntry.m_unlinkedBlocks[i].m_blockIndex].get();
1408        ASSERT(!block->isLinked);
1409        Node* node = block->last();
1410        ASSERT(node->op() == Jump);
1411        ASSERT(node->takenBlockIndex() == NoBlock);
1412        node->setTakenBlockIndex(m_graph.m_blocks.size());
1413        inlineStackEntry.m_unlinkedBlocks[i].m_needsEarlyReturnLinking = false;
1414#if !ASSERT_DISABLED
1415        block->isLinked = true;
1416#endif
1417    }
1418
1419    // Need to create a new basic block for the continuation at the caller.
1420    OwnPtr<BasicBlock> block = adoptPtr(new BasicBlock(nextOffset, m_numArguments, m_numLocals));
1421#if DFG_ENABLE(DEBUG_VERBOSE)
1422    dataLogF("Creating inline epilogue basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.m_blocks.size(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
1423#endif
1424    m_currentBlock = block.get();
1425    ASSERT(m_inlineStackTop->m_caller->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_caller->m_blockLinkingTargets.last()]->bytecodeBegin < nextOffset);
1426    m_inlineStackTop->m_caller->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size()));
1427    m_inlineStackTop->m_caller->m_blockLinkingTargets.append(m_graph.m_blocks.size());
1428    m_graph.m_blocks.append(block.release());
1429    prepareToParseBlock();
1430
1431    // At this point we return and continue to generate code for the caller, but
1432    // in the new basic block.
1433#if DFG_ENABLE(DEBUG_VERBOSE)
1434    dataLogF("Done inlining executable %p, continuing code generation in new block.\n", executable);
1435#endif
1436    return true;
1437}
1438
1439void ByteCodeParser::setIntrinsicResult(bool usesResult, int resultOperand, Node* node)
1440{
1441    if (!usesResult)
1442        return;
1443    set(resultOperand, node);
1444}
1445
1446bool ByteCodeParser::handleMinMax(bool usesResult, int resultOperand, NodeType op, int registerOffset, int argumentCountIncludingThis)
1447{
1448    if (argumentCountIncludingThis == 1) { // Math.min()
1449        setIntrinsicResult(usesResult, resultOperand, constantNaN());
1450        return true;
1451    }
1452
1453    if (argumentCountIncludingThis == 2) { // Math.min(x)
1454        Node* result = get(registerOffset + argumentToOperand(1));
1455        addToGraph(Phantom, Edge(result, NumberUse));
1456        setIntrinsicResult(usesResult, resultOperand, result);
1457        return true;
1458    }
1459
1460    if (argumentCountIncludingThis == 3) { // Math.min(x, y)
1461        setIntrinsicResult(usesResult, resultOperand, addToGraph(op, get(registerOffset + argumentToOperand(1)), get(registerOffset + argumentToOperand(2))));
1462        return true;
1463    }
1464
1465    // Don't handle >=3 arguments for now.
1466    return false;
1467}
1468
1469// FIXME: We dead-code-eliminate unused Math intrinsics, but that's invalid because
1470// they need to perform the ToNumber conversion, which can have side-effects.
1471bool ByteCodeParser::handleIntrinsic(bool usesResult, int resultOperand, Intrinsic intrinsic, int registerOffset, int argumentCountIncludingThis, SpeculatedType prediction)
1472{
1473    switch (intrinsic) {
1474    case AbsIntrinsic: {
1475        if (argumentCountIncludingThis == 1) { // Math.abs()
1476            setIntrinsicResult(usesResult, resultOperand, constantNaN());
1477            return true;
1478        }
1479
1480        if (!MacroAssembler::supportsFloatingPointAbs())
1481            return false;
1482
1483        Node* node = addToGraph(ArithAbs, get(registerOffset + argumentToOperand(1)));
1484        if (m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, Overflow))
1485            node->mergeFlags(NodeMayOverflow);
1486        setIntrinsicResult(usesResult, resultOperand, node);
1487        return true;
1488    }
1489
1490    case MinIntrinsic:
1491        return handleMinMax(usesResult, resultOperand, ArithMin, registerOffset, argumentCountIncludingThis);
1492
1493    case MaxIntrinsic:
1494        return handleMinMax(usesResult, resultOperand, ArithMax, registerOffset, argumentCountIncludingThis);
1495
1496    case SqrtIntrinsic: {
1497        if (argumentCountIncludingThis == 1) { // Math.sqrt()
1498            setIntrinsicResult(usesResult, resultOperand, constantNaN());
1499            return true;
1500        }
1501
1502        if (!MacroAssembler::supportsFloatingPointSqrt())
1503            return false;
1504
1505        setIntrinsicResult(usesResult, resultOperand, addToGraph(ArithSqrt, get(registerOffset + argumentToOperand(1))));
1506        return true;
1507    }
1508
1509    case ArrayPushIntrinsic: {
1510        if (argumentCountIncludingThis != 2)
1511            return false;
1512
1513        ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
1514        if (!arrayMode.isJSArray())
1515            return false;
1516        switch (arrayMode.type()) {
1517        case Array::Undecided:
1518        case Array::Int32:
1519        case Array::Double:
1520        case Array::Contiguous:
1521        case Array::ArrayStorage: {
1522            Node* arrayPush = addToGraph(ArrayPush, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1523            if (usesResult)
1524                set(resultOperand, arrayPush);
1525
1526            return true;
1527        }
1528
1529        default:
1530            return false;
1531        }
1532    }
1533
1534    case ArrayPopIntrinsic: {
1535        if (argumentCountIncludingThis != 1)
1536            return false;
1537
1538        ArrayMode arrayMode = getArrayMode(m_currentInstruction[5].u.arrayProfile);
1539        if (!arrayMode.isJSArray())
1540            return false;
1541        switch (arrayMode.type()) {
1542        case Array::Int32:
1543        case Array::Double:
1544        case Array::Contiguous:
1545        case Array::ArrayStorage: {
1546            Node* arrayPop = addToGraph(ArrayPop, OpInfo(arrayMode.asWord()), OpInfo(prediction), get(registerOffset + argumentToOperand(0)));
1547            if (usesResult)
1548                set(resultOperand, arrayPop);
1549            return true;
1550        }
1551
1552        default:
1553            return false;
1554        }
1555    }
1556
1557    case CharCodeAtIntrinsic: {
1558        if (argumentCountIncludingThis != 2)
1559            return false;
1560
1561        int thisOperand = registerOffset + argumentToOperand(0);
1562        int indexOperand = registerOffset + argumentToOperand(1);
1563        Node* charCode = addToGraph(StringCharCodeAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1564
1565        if (usesResult)
1566            set(resultOperand, charCode);
1567        return true;
1568    }
1569
1570    case CharAtIntrinsic: {
1571        if (argumentCountIncludingThis != 2)
1572            return false;
1573
1574        int thisOperand = registerOffset + argumentToOperand(0);
1575        int indexOperand = registerOffset + argumentToOperand(1);
1576        Node* charCode = addToGraph(StringCharAt, OpInfo(ArrayMode(Array::String).asWord()), get(thisOperand), getToInt32(indexOperand));
1577
1578        if (usesResult)
1579            set(resultOperand, charCode);
1580        return true;
1581    }
1582    case FromCharCodeIntrinsic: {
1583        if (argumentCountIncludingThis != 2)
1584            return false;
1585
1586        int indexOperand = registerOffset + argumentToOperand(1);
1587        Node* charCode = addToGraph(StringFromCharCode, getToInt32(indexOperand));
1588
1589        if (usesResult)
1590            set(resultOperand, charCode);
1591
1592        return true;
1593    }
1594
1595    case RegExpExecIntrinsic: {
1596        if (argumentCountIncludingThis != 2)
1597            return false;
1598
1599        Node* regExpExec = addToGraph(RegExpExec, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1600        if (usesResult)
1601            set(resultOperand, regExpExec);
1602
1603        return true;
1604    }
1605
1606    case RegExpTestIntrinsic: {
1607        if (argumentCountIncludingThis != 2)
1608            return false;
1609
1610        Node* regExpExec = addToGraph(RegExpTest, OpInfo(0), OpInfo(prediction), get(registerOffset + argumentToOperand(0)), get(registerOffset + argumentToOperand(1)));
1611        if (usesResult)
1612            set(resultOperand, regExpExec);
1613
1614        return true;
1615    }
1616
1617    case IMulIntrinsic: {
1618        if (argumentCountIncludingThis != 3)
1619            return false;
1620        int leftOperand = registerOffset + argumentToOperand(1);
1621        int rightOperand = registerOffset + argumentToOperand(2);
1622        Node* left = getToInt32(leftOperand);
1623        Node* right = getToInt32(rightOperand);
1624        setIntrinsicResult(usesResult, resultOperand, addToGraph(ArithIMul, left, right));
1625        return true;
1626    }
1627
1628    default:
1629        return false;
1630    }
1631}
1632
1633bool ByteCodeParser::handleConstantInternalFunction(
1634    bool usesResult, int resultOperand, InternalFunction* function, int registerOffset,
1635    int argumentCountIncludingThis, SpeculatedType prediction, CodeSpecializationKind kind)
1636{
1637    // If we ever find that we have a lot of internal functions that we specialize for,
1638    // then we should probably have some sort of hashtable dispatch, or maybe even
1639    // dispatch straight through the MethodTable of the InternalFunction. But for now,
1640    // it seems that this case is hit infrequently enough, and the number of functions
1641    // we know about is small enough, that having just a linear cascade of if statements
1642    // is good enough.
1643
1644    UNUSED_PARAM(prediction); // Remove this once we do more things.
1645
1646    if (function->classInfo() == &ArrayConstructor::s_info) {
1647        if (argumentCountIncludingThis == 2) {
1648            setIntrinsicResult(
1649                usesResult, resultOperand,
1650                addToGraph(NewArrayWithSize, OpInfo(ArrayWithUndecided), get(registerOffset + argumentToOperand(1))));
1651            return true;
1652        }
1653
1654        for (int i = 1; i < argumentCountIncludingThis; ++i)
1655            addVarArgChild(get(registerOffset + argumentToOperand(i)));
1656        setIntrinsicResult(
1657            usesResult, resultOperand,
1658            addToGraph(Node::VarArg, NewArray, OpInfo(ArrayWithUndecided), OpInfo(0)));
1659        return true;
1660    } else if (function->classInfo() == &StringConstructor::s_info) {
1661        Node* result;
1662
1663        if (argumentCountIncludingThis <= 1)
1664            result = cellConstant(m_vm->smallStrings.emptyString());
1665        else
1666            result = addToGraph(ToString, get(registerOffset + argumentToOperand(1)));
1667
1668        if (kind == CodeForConstruct)
1669            result = addToGraph(NewStringObject, OpInfo(function->globalObject()->stringObjectStructure()), result);
1670
1671        setIntrinsicResult(usesResult, resultOperand, result);
1672        return true;
1673    }
1674
1675    return false;
1676}
1677
1678Node* ByteCodeParser::handleGetByOffset(SpeculatedType prediction, Node* base, unsigned identifierNumber, PropertyOffset offset)
1679{
1680    Node* propertyStorage;
1681    if (isInlineOffset(offset))
1682        propertyStorage = base;
1683    else
1684        propertyStorage = addToGraph(GetButterfly, base);
1685    // FIXME: It would be far more efficient for load elimination (and safer from
1686    // an OSR standpoint) if GetByOffset also referenced the object we were loading
1687    // from, and if we could load eliminate a GetByOffset even if the butterfly
1688    // had changed. That would be a great success.
1689    Node* getByOffset = addToGraph(GetByOffset, OpInfo(m_graph.m_storageAccessData.size()), OpInfo(prediction), propertyStorage);
1690
1691    StorageAccessData storageAccessData;
1692    storageAccessData.offset = indexRelativeToBase(offset);
1693    storageAccessData.identifierNumber = identifierNumber;
1694    m_graph.m_storageAccessData.append(storageAccessData);
1695
1696    return getByOffset;
1697}
1698
1699void ByteCodeParser::handleGetByOffset(
1700    int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1701    PropertyOffset offset)
1702{
1703    set(destinationOperand, handleGetByOffset(prediction, base, identifierNumber, offset));
1704}
1705
1706void ByteCodeParser::handleGetById(
1707    int destinationOperand, SpeculatedType prediction, Node* base, unsigned identifierNumber,
1708    const GetByIdStatus& getByIdStatus)
1709{
1710    if (!getByIdStatus.isSimple()
1711        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
1712        || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache)) {
1713        set(destinationOperand,
1714            addToGraph(
1715                getByIdStatus.makesCalls() ? GetByIdFlush : GetById,
1716                OpInfo(identifierNumber), OpInfo(prediction), base));
1717        return;
1718    }
1719
1720    ASSERT(getByIdStatus.structureSet().size());
1721
1722    // The implementation of GetByOffset does not know to terminate speculative
1723    // execution if it doesn't have a prediction, so we do it manually.
1724    if (prediction == SpecNone)
1725        addToGraph(ForceOSRExit);
1726    else if (m_graph.m_compilation)
1727        m_graph.m_compilation->noticeInlinedGetById();
1728
1729    Node* originalBaseForBaselineJIT = base;
1730
1731    addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(getByIdStatus.structureSet())), base);
1732
1733    if (!getByIdStatus.chain().isEmpty()) {
1734        Structure* currentStructure = getByIdStatus.structureSet().singletonStructure();
1735        JSObject* currentObject = 0;
1736        for (unsigned i = 0; i < getByIdStatus.chain().size(); ++i) {
1737            currentObject = asObject(currentStructure->prototypeForLookup(m_inlineStackTop->m_codeBlock));
1738            currentStructure = getByIdStatus.chain()[i];
1739            base = addStructureTransitionCheck(currentObject, currentStructure);
1740        }
1741    }
1742
1743    // Unless we want bugs like https://bugs.webkit.org/show_bug.cgi?id=88783, we need to
1744    // ensure that the base of the original get_by_id is kept alive until we're done with
1745    // all of the speculations. We only insert the Phantom if there had been a CheckStructure
1746    // on something other than the base following the CheckStructure on base, or if the
1747    // access was compiled to a WeakJSConstant specific value, in which case we might not
1748    // have any explicit use of the base at all.
1749    if (getByIdStatus.specificValue() || originalBaseForBaselineJIT != base)
1750        addToGraph(Phantom, originalBaseForBaselineJIT);
1751
1752    if (getByIdStatus.specificValue()) {
1753        ASSERT(getByIdStatus.specificValue().isCell());
1754
1755        set(destinationOperand, cellConstant(getByIdStatus.specificValue().asCell()));
1756        return;
1757    }
1758
1759    handleGetByOffset(
1760        destinationOperand, prediction, base, identifierNumber, getByIdStatus.offset());
1761}
1762
1763void ByteCodeParser::prepareToParseBlock()
1764{
1765    for (unsigned i = 0; i < m_constants.size(); ++i)
1766        m_constants[i] = ConstantRecord();
1767    m_cellConstantNodes.clear();
1768}
1769
1770Node* ByteCodeParser::getScope(bool skipTop, unsigned skipCount)
1771{
1772    Node* localBase;
1773    if (inlineCallFrame() && !inlineCallFrame()->isClosureCall()) {
1774        ASSERT(inlineCallFrame()->callee);
1775        localBase = cellConstant(inlineCallFrame()->callee->scope());
1776    } else
1777        localBase = addToGraph(GetMyScope);
1778    if (skipTop) {
1779        ASSERT(!inlineCallFrame());
1780        localBase = addToGraph(SkipTopScope, localBase);
1781    }
1782    for (unsigned n = skipCount; n--;)
1783        localBase = addToGraph(SkipScope, localBase);
1784    return localBase;
1785}
1786
1787bool ByteCodeParser::parseResolveOperations(SpeculatedType prediction, unsigned identifier, ResolveOperations* resolveOperations, PutToBaseOperation* putToBaseOperation, Node** base, Node** value)
1788{
1789    if (resolveOperations->isEmpty()) {
1790        addToGraph(ForceOSRExit);
1791        return false;
1792    }
1793    JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
1794    int skipCount = 0;
1795    bool skipTop = false;
1796    bool skippedScopes = false;
1797    bool setBase = false;
1798    ResolveOperation* pc = resolveOperations->data();
1799    Node* localBase = 0;
1800    bool resolvingBase = true;
1801    while (resolvingBase) {
1802        switch (pc->m_operation) {
1803        case ResolveOperation::ReturnGlobalObjectAsBase:
1804            *base = cellConstant(globalObject);
1805            ASSERT(!value);
1806            return true;
1807
1808        case ResolveOperation::SetBaseToGlobal:
1809            *base = cellConstant(globalObject);
1810            setBase = true;
1811            resolvingBase = false;
1812            ++pc;
1813            break;
1814
1815        case ResolveOperation::SetBaseToUndefined:
1816            *base = constantUndefined();
1817            setBase = true;
1818            resolvingBase = false;
1819            ++pc;
1820            break;
1821
1822        case ResolveOperation::SetBaseToScope:
1823            localBase = getScope(skipTop, skipCount);
1824            *base = localBase;
1825            setBase = true;
1826
1827            resolvingBase = false;
1828
1829            // Reset the scope skipping as we've already loaded it
1830            skippedScopes = false;
1831            ++pc;
1832            break;
1833        case ResolveOperation::ReturnScopeAsBase:
1834            *base = getScope(skipTop, skipCount);
1835            ASSERT(!value);
1836            return true;
1837
1838        case ResolveOperation::SkipTopScopeNode:
1839            ASSERT(!inlineCallFrame());
1840            skipTop = true;
1841            skippedScopes = true;
1842            ++pc;
1843            break;
1844
1845        case ResolveOperation::SkipScopes:
1846            skipCount += pc->m_scopesToSkip;
1847            skippedScopes = true;
1848            ++pc;
1849            break;
1850
1851        case ResolveOperation::CheckForDynamicEntriesBeforeGlobalScope:
1852            return false;
1853
1854        case ResolveOperation::Fail:
1855            return false;
1856
1857        default:
1858            resolvingBase = false;
1859        }
1860    }
1861    if (skippedScopes)
1862        localBase = getScope(skipTop, skipCount);
1863
1864    if (base && !setBase)
1865        *base = localBase;
1866
1867    ASSERT(value);
1868    ResolveOperation* resolveValueOperation = pc;
1869    switch (resolveValueOperation->m_operation) {
1870    case ResolveOperation::GetAndReturnGlobalProperty: {
1871        ResolveGlobalStatus status = ResolveGlobalStatus::computeFor(m_inlineStackTop->m_profiledBlock, m_currentIndex, resolveValueOperation, m_codeBlock->identifier(identifier));
1872        if (status.isSimple()) {
1873            ASSERT(status.structure());
1874
1875            Node* globalObjectNode = addStructureTransitionCheck(globalObject, status.structure());
1876
1877            if (status.specificValue()) {
1878                ASSERT(status.specificValue().isCell());
1879                *value = cellConstant(status.specificValue().asCell());
1880            } else
1881                *value = handleGetByOffset(prediction, globalObjectNode, identifier, status.offset());
1882            return true;
1883        }
1884
1885        Node* resolve = addToGraph(ResolveGlobal, OpInfo(m_graph.m_resolveGlobalData.size()), OpInfo(prediction));
1886        m_graph.m_resolveGlobalData.append(ResolveGlobalData());
1887        ResolveGlobalData& data = m_graph.m_resolveGlobalData.last();
1888        data.identifierNumber = identifier;
1889        data.resolveOperations = resolveOperations;
1890        data.putToBaseOperation = putToBaseOperation;
1891        data.resolvePropertyIndex = resolveValueOperation - resolveOperations->data();
1892        *value = resolve;
1893        return true;
1894    }
1895    case ResolveOperation::GetAndReturnGlobalVar: {
1896        *value = addToGraph(
1897            GetGlobalVar,
1898            OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)),
1899            OpInfo(prediction));
1900        return true;
1901    }
1902    case ResolveOperation::GetAndReturnGlobalVarWatchable: {
1903        SpeculatedType prediction = getPrediction();
1904
1905        JSGlobalObject* globalObject = m_inlineStackTop->m_codeBlock->globalObject();
1906
1907        Identifier ident = m_codeBlock->identifier(identifier);
1908        SymbolTableEntry entry = globalObject->symbolTable()->get(ident.impl());
1909        if (!entry.couldBeWatched()) {
1910            *value = addToGraph(GetGlobalVar, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(prediction));
1911            return true;
1912        }
1913
1914        // The watchpoint is still intact! This means that we will get notified if the
1915        // current value in the global variable changes. So, we can inline that value.
1916        // Moreover, currently we can assume that this value is a JSFunction*, which
1917        // implies that it's a cell. This simplifies things, since in general we'd have
1918        // to use a JSConstant for non-cells and a WeakJSConstant for cells. So instead
1919        // of having both cases we just assert that the value is a cell.
1920
1921        // NB. If it wasn't for CSE, GlobalVarWatchpoint would have no need for the
1922        // register pointer. But CSE tracks effects on global variables by comparing
1923        // register pointers. Because CSE executes multiple times while the backend
1924        // executes once, we use the following performance trade-off:
1925        // - The node refers directly to the register pointer to make CSE super cheap.
1926        // - To perform backend code generation, the node only contains the identifier
1927        //   number, from which it is possible to get (via a few average-time O(1)
1928        //   lookups) to the WatchpointSet.
1929
1930        addToGraph(GlobalVarWatchpoint, OpInfo(globalObject->assertRegisterIsInThisObject(pc->m_registerAddress)), OpInfo(identifier));
1931
1932        JSValue specificValue = globalObject->registerAt(entry.getIndex()).get();
1933        ASSERT(specificValue.isCell());
1934        *value = cellConstant(specificValue.asCell());
1935        return true;
1936    }
1937    case ResolveOperation::GetAndReturnScopedVar: {
1938        Node* getScopeRegisters = addToGraph(GetScopeRegisters, localBase);
1939        *value = addToGraph(GetScopedVar, OpInfo(resolveValueOperation->m_offset), OpInfo(prediction), getScopeRegisters);
1940        return true;
1941    }
1942    default:
1943        CRASH();
1944        return false;
1945    }
1946
1947}
1948
1949bool ByteCodeParser::parseBlock(unsigned limit)
1950{
1951    bool shouldContinueParsing = true;
1952
1953    Interpreter* interpreter = m_vm->interpreter;
1954    Instruction* instructionsBegin = m_inlineStackTop->m_codeBlock->instructions().begin();
1955    unsigned blockBegin = m_currentIndex;
1956
1957    // If we are the first basic block, introduce markers for arguments. This allows
1958    // us to track if a use of an argument may use the actual argument passed, as
1959    // opposed to using a value we set explicitly.
1960    if (m_currentBlock == m_graph.m_blocks[0].get() && !inlineCallFrame()) {
1961        m_graph.m_arguments.resize(m_numArguments);
1962        for (unsigned argument = 0; argument < m_numArguments; ++argument) {
1963            VariableAccessData* variable = newVariableAccessData(
1964                argumentToOperand(argument), m_codeBlock->isCaptured(argumentToOperand(argument)));
1965            variable->mergeStructureCheckHoistingFailed(
1966                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache));
1967
1968            Node* setArgument = addToGraph(SetArgument, OpInfo(variable));
1969            m_graph.m_arguments[argument] = setArgument;
1970            m_currentBlock->variablesAtTail.setArgumentFirstTime(argument, setArgument);
1971        }
1972    }
1973
1974    while (true) {
1975        m_currentProfilingIndex = m_currentIndex;
1976
1977        // Don't extend over jump destinations.
1978        if (m_currentIndex == limit) {
1979            // Ordinarily we want to plant a jump. But refuse to do this if the block is
1980            // empty. This is a special case for inlining, which might otherwise create
1981            // some empty blocks in some cases. When parseBlock() returns with an empty
1982            // block, it will get repurposed instead of creating a new one. Note that this
1983            // logic relies on every bytecode resulting in one or more nodes, which would
1984            // be true anyway except for op_loop_hint, which emits a Phantom to force this
1985            // to be true.
1986            if (!m_currentBlock->isEmpty())
1987                addToGraph(Jump, OpInfo(m_currentIndex));
1988            else {
1989#if DFG_ENABLE(DEBUG_VERBOSE)
1990                dataLogF("Refusing to plant jump at limit %u because block %p is empty.\n", limit, m_currentBlock);
1991#endif
1992            }
1993            return shouldContinueParsing;
1994        }
1995
1996        // Switch on the current bytecode opcode.
1997        Instruction* currentInstruction = instructionsBegin + m_currentIndex;
1998        m_currentInstruction = currentInstruction; // Some methods want to use this, and we'd rather not thread it through calls.
1999        OpcodeID opcodeID = interpreter->getOpcodeID(currentInstruction->u.opcode);
2000
2001        if (m_graph.m_compilation && opcodeID != op_call_put_result) {
2002            addToGraph(CountExecution, OpInfo(m_graph.m_compilation->executionCounterFor(
2003                Profiler::OriginStack(*m_vm->m_perBytecodeProfiler, m_codeBlock, currentCodeOrigin()))));
2004        }
2005
2006        switch (opcodeID) {
2007
2008        // === Function entry opcodes ===
2009
2010        case op_enter:
2011            // Initialize all locals to undefined.
2012            for (int i = 0; i < m_inlineStackTop->m_codeBlock->m_numVars; ++i)
2013                set(i, constantUndefined(), SetOnEntry);
2014            NEXT_OPCODE(op_enter);
2015
2016        case op_convert_this: {
2017            Node* op1 = getThis();
2018            if (op1->op() != ConvertThis) {
2019                ValueProfile* profile =
2020                    m_inlineStackTop->m_profiledBlock->valueProfileForBytecodeOffset(m_currentProfilingIndex);
2021                profile->computeUpdatedPrediction();
2022#if DFG_ENABLE(DEBUG_VERBOSE)
2023                dataLogF("[bc#%u]: profile %p: ", m_currentProfilingIndex, profile);
2024                profile->dump(WTF::dataFile());
2025                dataLogF("\n");
2026#endif
2027                if (profile->m_singletonValueIsTop
2028                    || !profile->m_singletonValue
2029                    || !profile->m_singletonValue.isCell()
2030                    || profile->m_singletonValue.asCell()->classInfo() != &Structure::s_info)
2031                    setThis(addToGraph(ConvertThis, op1));
2032                else {
2033                    addToGraph(
2034                        CheckStructure,
2035                        OpInfo(m_graph.addStructureSet(jsCast<Structure*>(profile->m_singletonValue.asCell()))),
2036                        op1);
2037                }
2038            }
2039            NEXT_OPCODE(op_convert_this);
2040        }
2041
2042        case op_create_this: {
2043            int calleeOperand = currentInstruction[2].u.operand;
2044            Node* callee = get(calleeOperand);
2045            bool alreadyEmitted = false;
2046            if (callee->op() == WeakJSConstant) {
2047                JSCell* cell = callee->weakConstant();
2048                ASSERT(cell->inherits(&JSFunction::s_info));
2049
2050                JSFunction* function = jsCast<JSFunction*>(cell);
2051                ObjectAllocationProfile* allocationProfile = function->tryGetAllocationProfile();
2052                if (allocationProfile) {
2053                    addToGraph(AllocationProfileWatchpoint, OpInfo(function));
2054                    // The callee is still live up to this point.
2055                    addToGraph(Phantom, callee);
2056                    set(currentInstruction[1].u.operand,
2057                        addToGraph(NewObject, OpInfo(allocationProfile->structure())));
2058                    alreadyEmitted = true;
2059                }
2060            }
2061            if (!alreadyEmitted)
2062                set(currentInstruction[1].u.operand,
2063                    addToGraph(CreateThis, OpInfo(currentInstruction[3].u.operand), callee));
2064            NEXT_OPCODE(op_create_this);
2065        }
2066
2067        case op_new_object: {
2068            set(currentInstruction[1].u.operand,
2069                addToGraph(NewObject,
2070                    OpInfo(currentInstruction[3].u.objectAllocationProfile->structure())));
2071            NEXT_OPCODE(op_new_object);
2072        }
2073
2074        case op_new_array: {
2075            int startOperand = currentInstruction[2].u.operand;
2076            int numOperands = currentInstruction[3].u.operand;
2077            ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2078            for (int operandIdx = startOperand; operandIdx < startOperand + numOperands; ++operandIdx)
2079                addVarArgChild(get(operandIdx));
2080            set(currentInstruction[1].u.operand, addToGraph(Node::VarArg, NewArray, OpInfo(profile->selectIndexingType()), OpInfo(0)));
2081            NEXT_OPCODE(op_new_array);
2082        }
2083
2084        case op_new_array_with_size: {
2085            int lengthOperand = currentInstruction[2].u.operand;
2086            ArrayAllocationProfile* profile = currentInstruction[3].u.arrayAllocationProfile;
2087            set(currentInstruction[1].u.operand, addToGraph(NewArrayWithSize, OpInfo(profile->selectIndexingType()), get(lengthOperand)));
2088            NEXT_OPCODE(op_new_array_with_size);
2089        }
2090
2091        case op_new_array_buffer: {
2092            int startConstant = currentInstruction[2].u.operand;
2093            int numConstants = currentInstruction[3].u.operand;
2094            ArrayAllocationProfile* profile = currentInstruction[4].u.arrayAllocationProfile;
2095            NewArrayBufferData data;
2096            data.startConstant = m_inlineStackTop->m_constantBufferRemap[startConstant];
2097            data.numConstants = numConstants;
2098            data.indexingType = profile->selectIndexingType();
2099
2100            // If this statement has never executed, we'll have the wrong indexing type in the profile.
2101            for (int i = 0; i < numConstants; ++i) {
2102                data.indexingType =
2103                    leastUpperBoundOfIndexingTypeAndValue(
2104                        data.indexingType,
2105                        m_codeBlock->constantBuffer(data.startConstant)[i]);
2106            }
2107
2108            m_graph.m_newArrayBufferData.append(data);
2109            set(currentInstruction[1].u.operand, addToGraph(NewArrayBuffer, OpInfo(&m_graph.m_newArrayBufferData.last())));
2110            NEXT_OPCODE(op_new_array_buffer);
2111        }
2112
2113        case op_new_regexp: {
2114            set(currentInstruction[1].u.operand, addToGraph(NewRegexp, OpInfo(currentInstruction[2].u.operand)));
2115            NEXT_OPCODE(op_new_regexp);
2116        }
2117
2118        case op_get_callee: {
2119            ValueProfile* profile = currentInstruction[2].u.profile;
2120            profile->computeUpdatedPrediction();
2121            if (profile->m_singletonValueIsTop
2122                || !profile->m_singletonValue
2123                || !profile->m_singletonValue.isCell())
2124                set(currentInstruction[1].u.operand, get(JSStack::Callee));
2125            else {
2126                ASSERT(profile->m_singletonValue.asCell()->inherits(&JSFunction::s_info));
2127                Node* actualCallee = get(JSStack::Callee);
2128                addToGraph(CheckFunction, OpInfo(profile->m_singletonValue.asCell()), actualCallee);
2129                set(currentInstruction[1].u.operand, addToGraph(WeakJSConstant, OpInfo(profile->m_singletonValue.asCell())));
2130            }
2131            NEXT_OPCODE(op_get_callee);
2132        }
2133
2134        // === Bitwise operations ===
2135
2136        case op_bitand: {
2137            Node* op1 = getToInt32(currentInstruction[2].u.operand);
2138            Node* op2 = getToInt32(currentInstruction[3].u.operand);
2139            set(currentInstruction[1].u.operand, addToGraph(BitAnd, op1, op2));
2140            NEXT_OPCODE(op_bitand);
2141        }
2142
2143        case op_bitor: {
2144            Node* op1 = getToInt32(currentInstruction[2].u.operand);
2145            Node* op2 = getToInt32(currentInstruction[3].u.operand);
2146            set(currentInstruction[1].u.operand, addToGraph(BitOr, op1, op2));
2147            NEXT_OPCODE(op_bitor);
2148        }
2149
2150        case op_bitxor: {
2151            Node* op1 = getToInt32(currentInstruction[2].u.operand);
2152            Node* op2 = getToInt32(currentInstruction[3].u.operand);
2153            set(currentInstruction[1].u.operand, addToGraph(BitXor, op1, op2));
2154            NEXT_OPCODE(op_bitxor);
2155        }
2156
2157        case op_rshift: {
2158            Node* op1 = getToInt32(currentInstruction[2].u.operand);
2159            Node* op2 = getToInt32(currentInstruction[3].u.operand);
2160            Node* result;
2161            // Optimize out shifts by zero.
2162            if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2163                result = op1;
2164            else
2165                result = addToGraph(BitRShift, op1, op2);
2166            set(currentInstruction[1].u.operand, result);
2167            NEXT_OPCODE(op_rshift);
2168        }
2169
2170        case op_lshift: {
2171            Node* op1 = getToInt32(currentInstruction[2].u.operand);
2172            Node* op2 = getToInt32(currentInstruction[3].u.operand);
2173            Node* result;
2174            // Optimize out shifts by zero.
2175            if (isInt32Constant(op2) && !(valueOfInt32Constant(op2) & 0x1f))
2176                result = op1;
2177            else
2178                result = addToGraph(BitLShift, op1, op2);
2179            set(currentInstruction[1].u.operand, result);
2180            NEXT_OPCODE(op_lshift);
2181        }
2182
2183        case op_urshift: {
2184            Node* op1 = getToInt32(currentInstruction[2].u.operand);
2185            Node* op2 = getToInt32(currentInstruction[3].u.operand);
2186            Node* result;
2187            // The result of a zero-extending right shift is treated as an unsigned value.
2188            // This means that if the top bit is set, the result is not in the int32 range,
2189            // and as such must be stored as a double. If the shift amount is a constant,
2190            // we may be able to optimize.
2191            if (isInt32Constant(op2)) {
2192                // If we know we are shifting by a non-zero amount, then since the operation
2193                // zero fills we know the top bit of the result must be zero, and as such the
2194                // result must be within the int32 range. Conversely, if this is a shift by
2195                // zero, then the result may be changed by the conversion to unsigned, but it
2196                // is not necessary to perform the shift!
2197                if (valueOfInt32Constant(op2) & 0x1f)
2198                    result = addToGraph(BitURShift, op1, op2);
2199                else
2200                    result = makeSafe(addToGraph(UInt32ToNumber, op1));
2201            }  else {
2202                // Cannot optimize at this stage; shift & potentially rebox as a double.
2203                result = addToGraph(BitURShift, op1, op2);
2204                result = makeSafe(addToGraph(UInt32ToNumber, result));
2205            }
2206            set(currentInstruction[1].u.operand, result);
2207            NEXT_OPCODE(op_urshift);
2208        }
2209
2210        // === Increment/Decrement opcodes ===
2211
2212        case op_inc: {
2213            unsigned srcDst = currentInstruction[1].u.operand;
2214            Node* op = get(srcDst);
2215            set(srcDst, makeSafe(addToGraph(ArithAdd, op, one())));
2216            NEXT_OPCODE(op_inc);
2217        }
2218
2219        case op_dec: {
2220            unsigned srcDst = currentInstruction[1].u.operand;
2221            Node* op = get(srcDst);
2222            set(srcDst, makeSafe(addToGraph(ArithSub, op, one())));
2223            NEXT_OPCODE(op_dec);
2224        }
2225
2226        // === Arithmetic operations ===
2227
2228        case op_add: {
2229            Node* op1 = get(currentInstruction[2].u.operand);
2230            Node* op2 = get(currentInstruction[3].u.operand);
2231            if (op1->hasNumberResult() && op2->hasNumberResult())
2232                set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithAdd, op1, op2)));
2233            else
2234                set(currentInstruction[1].u.operand, makeSafe(addToGraph(ValueAdd, op1, op2)));
2235            NEXT_OPCODE(op_add);
2236        }
2237
2238        case op_sub: {
2239            Node* op1 = get(currentInstruction[2].u.operand);
2240            Node* op2 = get(currentInstruction[3].u.operand);
2241            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithSub, op1, op2)));
2242            NEXT_OPCODE(op_sub);
2243        }
2244
2245        case op_negate: {
2246            Node* op1 = get(currentInstruction[2].u.operand);
2247            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithNegate, op1)));
2248            NEXT_OPCODE(op_negate);
2249        }
2250
2251        case op_mul: {
2252            // Multiply requires that the inputs are not truncated, unfortunately.
2253            Node* op1 = get(currentInstruction[2].u.operand);
2254            Node* op2 = get(currentInstruction[3].u.operand);
2255            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMul, op1, op2)));
2256            NEXT_OPCODE(op_mul);
2257        }
2258
2259        case op_mod: {
2260            Node* op1 = get(currentInstruction[2].u.operand);
2261            Node* op2 = get(currentInstruction[3].u.operand);
2262            set(currentInstruction[1].u.operand, makeSafe(addToGraph(ArithMod, op1, op2)));
2263            NEXT_OPCODE(op_mod);
2264        }
2265
2266        case op_div: {
2267            Node* op1 = get(currentInstruction[2].u.operand);
2268            Node* op2 = get(currentInstruction[3].u.operand);
2269            set(currentInstruction[1].u.operand, makeDivSafe(addToGraph(ArithDiv, op1, op2)));
2270            NEXT_OPCODE(op_div);
2271        }
2272
2273        // === Misc operations ===
2274
2275#if ENABLE(DEBUG_WITH_BREAKPOINT)
2276        case op_debug:
2277            addToGraph(Breakpoint);
2278            NEXT_OPCODE(op_debug);
2279#endif
2280        case op_mov: {
2281            Node* op = get(currentInstruction[2].u.operand);
2282            set(currentInstruction[1].u.operand, op);
2283            NEXT_OPCODE(op_mov);
2284        }
2285
2286        case op_check_has_instance:
2287            addToGraph(CheckHasInstance, get(currentInstruction[3].u.operand));
2288            NEXT_OPCODE(op_check_has_instance);
2289
2290        case op_instanceof: {
2291            Node* value = get(currentInstruction[2].u.operand);
2292            Node* prototype = get(currentInstruction[3].u.operand);
2293            set(currentInstruction[1].u.operand, addToGraph(InstanceOf, value, prototype));
2294            NEXT_OPCODE(op_instanceof);
2295        }
2296
2297        case op_is_undefined: {
2298            Node* value = get(currentInstruction[2].u.operand);
2299            set(currentInstruction[1].u.operand, addToGraph(IsUndefined, value));
2300            NEXT_OPCODE(op_is_undefined);
2301        }
2302
2303        case op_is_boolean: {
2304            Node* value = get(currentInstruction[2].u.operand);
2305            set(currentInstruction[1].u.operand, addToGraph(IsBoolean, value));
2306            NEXT_OPCODE(op_is_boolean);
2307        }
2308
2309        case op_is_number: {
2310            Node* value = get(currentInstruction[2].u.operand);
2311            set(currentInstruction[1].u.operand, addToGraph(IsNumber, value));
2312            NEXT_OPCODE(op_is_number);
2313        }
2314
2315        case op_is_string: {
2316            Node* value = get(currentInstruction[2].u.operand);
2317            set(currentInstruction[1].u.operand, addToGraph(IsString, value));
2318            NEXT_OPCODE(op_is_string);
2319        }
2320
2321        case op_is_object: {
2322            Node* value = get(currentInstruction[2].u.operand);
2323            set(currentInstruction[1].u.operand, addToGraph(IsObject, value));
2324            NEXT_OPCODE(op_is_object);
2325        }
2326
2327        case op_is_function: {
2328            Node* value = get(currentInstruction[2].u.operand);
2329            set(currentInstruction[1].u.operand, addToGraph(IsFunction, value));
2330            NEXT_OPCODE(op_is_function);
2331        }
2332
2333        case op_not: {
2334            Node* value = get(currentInstruction[2].u.operand);
2335            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, value));
2336            NEXT_OPCODE(op_not);
2337        }
2338
2339        case op_to_primitive: {
2340            Node* value = get(currentInstruction[2].u.operand);
2341            set(currentInstruction[1].u.operand, addToGraph(ToPrimitive, value));
2342            NEXT_OPCODE(op_to_primitive);
2343        }
2344
2345        case op_strcat: {
2346            int startOperand = currentInstruction[2].u.operand;
2347            int numOperands = currentInstruction[3].u.operand;
2348#if CPU(X86)
2349            // X86 doesn't have enough registers to compile MakeRope with three arguments.
2350            // Rather than try to be clever, we just make MakeRope dumber on this processor.
2351            const unsigned maxRopeArguments = 2;
2352#else
2353            const unsigned maxRopeArguments = 3;
2354#endif
2355            OwnArrayPtr<Node*> toStringNodes = adoptArrayPtr(new Node*[numOperands]);
2356            for (int i = 0; i < numOperands; i++)
2357                toStringNodes[i] = addToGraph(ToString, get(startOperand + i));
2358
2359            for (int i = 0; i < numOperands; i++)
2360                addToGraph(Phantom, toStringNodes[i]);
2361
2362            Node* operands[AdjacencyList::Size];
2363            unsigned indexInOperands = 0;
2364            for (unsigned i = 0; i < AdjacencyList::Size; ++i)
2365                operands[i] = 0;
2366            for (int operandIdx = 0; operandIdx < numOperands; ++operandIdx) {
2367                if (indexInOperands == maxRopeArguments) {
2368                    operands[0] = addToGraph(MakeRope, operands[0], operands[1], operands[2]);
2369                    for (unsigned i = 1; i < AdjacencyList::Size; ++i)
2370                        operands[i] = 0;
2371                    indexInOperands = 1;
2372                }
2373
2374                ASSERT(indexInOperands < AdjacencyList::Size);
2375                ASSERT(indexInOperands < maxRopeArguments);
2376                operands[indexInOperands++] = toStringNodes[operandIdx];
2377            }
2378            set(currentInstruction[1].u.operand,
2379                addToGraph(MakeRope, operands[0], operands[1], operands[2]));
2380            NEXT_OPCODE(op_strcat);
2381        }
2382
2383        case op_less: {
2384            Node* op1 = get(currentInstruction[2].u.operand);
2385            Node* op2 = get(currentInstruction[3].u.operand);
2386            if (canFold(op1) && canFold(op2)) {
2387                JSValue a = valueOfJSConstant(op1);
2388                JSValue b = valueOfJSConstant(op2);
2389                if (a.isNumber() && b.isNumber()) {
2390                    set(currentInstruction[1].u.operand,
2391                        getJSConstantForValue(jsBoolean(a.asNumber() < b.asNumber())));
2392                    NEXT_OPCODE(op_less);
2393                }
2394            }
2395            set(currentInstruction[1].u.operand, addToGraph(CompareLess, op1, op2));
2396            NEXT_OPCODE(op_less);
2397        }
2398
2399        case op_lesseq: {
2400            Node* op1 = get(currentInstruction[2].u.operand);
2401            Node* op2 = get(currentInstruction[3].u.operand);
2402            if (canFold(op1) && canFold(op2)) {
2403                JSValue a = valueOfJSConstant(op1);
2404                JSValue b = valueOfJSConstant(op2);
2405                if (a.isNumber() && b.isNumber()) {
2406                    set(currentInstruction[1].u.operand,
2407                        getJSConstantForValue(jsBoolean(a.asNumber() <= b.asNumber())));
2408                    NEXT_OPCODE(op_lesseq);
2409                }
2410            }
2411            set(currentInstruction[1].u.operand, addToGraph(CompareLessEq, op1, op2));
2412            NEXT_OPCODE(op_lesseq);
2413        }
2414
2415        case op_greater: {
2416            Node* op1 = get(currentInstruction[2].u.operand);
2417            Node* op2 = get(currentInstruction[3].u.operand);
2418            if (canFold(op1) && canFold(op2)) {
2419                JSValue a = valueOfJSConstant(op1);
2420                JSValue b = valueOfJSConstant(op2);
2421                if (a.isNumber() && b.isNumber()) {
2422                    set(currentInstruction[1].u.operand,
2423                        getJSConstantForValue(jsBoolean(a.asNumber() > b.asNumber())));
2424                    NEXT_OPCODE(op_greater);
2425                }
2426            }
2427            set(currentInstruction[1].u.operand, addToGraph(CompareGreater, op1, op2));
2428            NEXT_OPCODE(op_greater);
2429        }
2430
2431        case op_greatereq: {
2432            Node* op1 = get(currentInstruction[2].u.operand);
2433            Node* op2 = get(currentInstruction[3].u.operand);
2434            if (canFold(op1) && canFold(op2)) {
2435                JSValue a = valueOfJSConstant(op1);
2436                JSValue b = valueOfJSConstant(op2);
2437                if (a.isNumber() && b.isNumber()) {
2438                    set(currentInstruction[1].u.operand,
2439                        getJSConstantForValue(jsBoolean(a.asNumber() >= b.asNumber())));
2440                    NEXT_OPCODE(op_greatereq);
2441                }
2442            }
2443            set(currentInstruction[1].u.operand, addToGraph(CompareGreaterEq, op1, op2));
2444            NEXT_OPCODE(op_greatereq);
2445        }
2446
2447        case op_eq: {
2448            Node* op1 = get(currentInstruction[2].u.operand);
2449            Node* op2 = get(currentInstruction[3].u.operand);
2450            if (canFold(op1) && canFold(op2)) {
2451                JSValue a = valueOfJSConstant(op1);
2452                JSValue b = valueOfJSConstant(op2);
2453                set(currentInstruction[1].u.operand,
2454                    getJSConstantForValue(jsBoolean(JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2455                NEXT_OPCODE(op_eq);
2456            }
2457            set(currentInstruction[1].u.operand, addToGraph(CompareEq, op1, op2));
2458            NEXT_OPCODE(op_eq);
2459        }
2460
2461        case op_eq_null: {
2462            Node* value = get(currentInstruction[2].u.operand);
2463            set(currentInstruction[1].u.operand, addToGraph(CompareEqConstant, value, constantNull()));
2464            NEXT_OPCODE(op_eq_null);
2465        }
2466
2467        case op_stricteq: {
2468            Node* op1 = get(currentInstruction[2].u.operand);
2469            Node* op2 = get(currentInstruction[3].u.operand);
2470            if (canFold(op1) && canFold(op2)) {
2471                JSValue a = valueOfJSConstant(op1);
2472                JSValue b = valueOfJSConstant(op2);
2473                set(currentInstruction[1].u.operand,
2474                    getJSConstantForValue(jsBoolean(JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2475                NEXT_OPCODE(op_stricteq);
2476            }
2477            if (isConstantForCompareStrictEq(op1))
2478                set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op2, op1));
2479            else if (isConstantForCompareStrictEq(op2))
2480                set(currentInstruction[1].u.operand, addToGraph(CompareStrictEqConstant, op1, op2));
2481            else
2482                set(currentInstruction[1].u.operand, addToGraph(CompareStrictEq, op1, op2));
2483            NEXT_OPCODE(op_stricteq);
2484        }
2485
2486        case op_neq: {
2487            Node* op1 = get(currentInstruction[2].u.operand);
2488            Node* op2 = get(currentInstruction[3].u.operand);
2489            if (canFold(op1) && canFold(op2)) {
2490                JSValue a = valueOfJSConstant(op1);
2491                JSValue b = valueOfJSConstant(op2);
2492                set(currentInstruction[1].u.operand,
2493                    getJSConstantForValue(jsBoolean(!JSValue::equal(m_codeBlock->globalObject()->globalExec(), a, b))));
2494                NEXT_OPCODE(op_neq);
2495            }
2496            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEq, op1, op2)));
2497            NEXT_OPCODE(op_neq);
2498        }
2499
2500        case op_neq_null: {
2501            Node* value = get(currentInstruction[2].u.operand);
2502            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, addToGraph(CompareEqConstant, value, constantNull())));
2503            NEXT_OPCODE(op_neq_null);
2504        }
2505
2506        case op_nstricteq: {
2507            Node* op1 = get(currentInstruction[2].u.operand);
2508            Node* op2 = get(currentInstruction[3].u.operand);
2509            if (canFold(op1) && canFold(op2)) {
2510                JSValue a = valueOfJSConstant(op1);
2511                JSValue b = valueOfJSConstant(op2);
2512                set(currentInstruction[1].u.operand,
2513                    getJSConstantForValue(jsBoolean(!JSValue::strictEqual(m_codeBlock->globalObject()->globalExec(), a, b))));
2514                NEXT_OPCODE(op_nstricteq);
2515            }
2516            Node* invertedResult;
2517            if (isConstantForCompareStrictEq(op1))
2518                invertedResult = addToGraph(CompareStrictEqConstant, op2, op1);
2519            else if (isConstantForCompareStrictEq(op2))
2520                invertedResult = addToGraph(CompareStrictEqConstant, op1, op2);
2521            else
2522                invertedResult = addToGraph(CompareStrictEq, op1, op2);
2523            set(currentInstruction[1].u.operand, addToGraph(LogicalNot, invertedResult));
2524            NEXT_OPCODE(op_nstricteq);
2525        }
2526
2527        // === Property access operations ===
2528
2529        case op_get_by_val: {
2530            SpeculatedType prediction = getPrediction();
2531
2532            Node* base = get(currentInstruction[2].u.operand);
2533            ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Read, base);
2534            Node* property = get(currentInstruction[3].u.operand);
2535            Node* getByVal = addToGraph(GetByVal, OpInfo(arrayMode.asWord()), OpInfo(prediction), base, property);
2536            set(currentInstruction[1].u.operand, getByVal);
2537
2538            NEXT_OPCODE(op_get_by_val);
2539        }
2540
2541        case op_put_by_val: {
2542            Node* base = get(currentInstruction[1].u.operand);
2543
2544            ArrayMode arrayMode = getArrayModeAndEmitChecks(currentInstruction[4].u.arrayProfile, Array::Write, base);
2545
2546            Node* property = get(currentInstruction[2].u.operand);
2547            Node* value = get(currentInstruction[3].u.operand);
2548
2549            addVarArgChild(base);
2550            addVarArgChild(property);
2551            addVarArgChild(value);
2552            addVarArgChild(0); // Leave room for property storage.
2553            addToGraph(Node::VarArg, PutByVal, OpInfo(arrayMode.asWord()), OpInfo(0));
2554
2555            NEXT_OPCODE(op_put_by_val);
2556        }
2557
2558        case op_get_by_id:
2559        case op_get_by_id_out_of_line:
2560        case op_get_array_length: {
2561            SpeculatedType prediction = getPrediction();
2562
2563            Node* base = get(currentInstruction[2].u.operand);
2564            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
2565
2566            Identifier identifier = m_codeBlock->identifier(identifierNumber);
2567            GetByIdStatus getByIdStatus = GetByIdStatus::computeFor(
2568                m_inlineStackTop->m_profiledBlock, m_currentIndex, identifier);
2569
2570            handleGetById(
2571                currentInstruction[1].u.operand, prediction, base, identifierNumber, getByIdStatus);
2572
2573            NEXT_OPCODE(op_get_by_id);
2574        }
2575        case op_put_by_id:
2576        case op_put_by_id_out_of_line:
2577        case op_put_by_id_transition_direct:
2578        case op_put_by_id_transition_normal:
2579        case op_put_by_id_transition_direct_out_of_line:
2580        case op_put_by_id_transition_normal_out_of_line: {
2581            Node* value = get(currentInstruction[3].u.operand);
2582            Node* base = get(currentInstruction[1].u.operand);
2583            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
2584            bool direct = currentInstruction[8].u.operand;
2585
2586            PutByIdStatus putByIdStatus = PutByIdStatus::computeFor(
2587                m_inlineStackTop->m_profiledBlock,
2588                m_currentIndex,
2589                m_codeBlock->identifier(identifierNumber));
2590            bool canCountAsInlined = true;
2591            if (!putByIdStatus.isSet()) {
2592                addToGraph(ForceOSRExit);
2593                canCountAsInlined = false;
2594            }
2595
2596            bool hasExitSite =
2597                m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadCache)
2598                || m_inlineStackTop->m_exitProfile.hasExitSite(m_currentIndex, BadWeakConstantCache);
2599
2600            if (!hasExitSite && putByIdStatus.isSimpleReplace()) {
2601                addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2602                Node* propertyStorage;
2603                if (isInlineOffset(putByIdStatus.offset()))
2604                    propertyStorage = base;
2605                else
2606                    propertyStorage = addToGraph(GetButterfly, base);
2607                addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, base, value);
2608
2609                StorageAccessData storageAccessData;
2610                storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
2611                storageAccessData.identifierNumber = identifierNumber;
2612                m_graph.m_storageAccessData.append(storageAccessData);
2613            } else if (!hasExitSite
2614                       && putByIdStatus.isSimpleTransition()
2615                       && structureChainIsStillValid(
2616                           direct,
2617                           putByIdStatus.oldStructure(),
2618                           putByIdStatus.structureChain())) {
2619
2620                addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putByIdStatus.oldStructure())), base);
2621                if (!direct) {
2622                    if (!putByIdStatus.oldStructure()->storedPrototype().isNull()) {
2623                        addStructureTransitionCheck(
2624                            putByIdStatus.oldStructure()->storedPrototype().asCell());
2625                    }
2626
2627                    for (WriteBarrier<Structure>* it = putByIdStatus.structureChain()->head(); *it; ++it) {
2628                        JSValue prototype = (*it)->storedPrototype();
2629                        if (prototype.isNull())
2630                            continue;
2631                        ASSERT(prototype.isCell());
2632                        addStructureTransitionCheck(prototype.asCell());
2633                    }
2634                }
2635                ASSERT(putByIdStatus.oldStructure()->transitionWatchpointSetHasBeenInvalidated());
2636
2637                Node* propertyStorage;
2638                StructureTransitionData* transitionData =
2639                    m_graph.addStructureTransitionData(
2640                        StructureTransitionData(
2641                            putByIdStatus.oldStructure(),
2642                            putByIdStatus.newStructure()));
2643
2644                if (putByIdStatus.oldStructure()->outOfLineCapacity()
2645                    != putByIdStatus.newStructure()->outOfLineCapacity()) {
2646
2647                    // If we're growing the property storage then it must be because we're
2648                    // storing into the out-of-line storage.
2649                    ASSERT(!isInlineOffset(putByIdStatus.offset()));
2650
2651                    if (!putByIdStatus.oldStructure()->outOfLineCapacity()) {
2652                        propertyStorage = addToGraph(
2653                            AllocatePropertyStorage, OpInfo(transitionData), base);
2654                    } else {
2655                        propertyStorage = addToGraph(
2656                            ReallocatePropertyStorage, OpInfo(transitionData),
2657                            base, addToGraph(GetButterfly, base));
2658                    }
2659                } else {
2660                    if (isInlineOffset(putByIdStatus.offset()))
2661                        propertyStorage = base;
2662                    else
2663                        propertyStorage = addToGraph(GetButterfly, base);
2664                }
2665
2666                addToGraph(PutStructure, OpInfo(transitionData), base);
2667
2668                addToGraph(
2669                    PutByOffset,
2670                    OpInfo(m_graph.m_storageAccessData.size()),
2671                    propertyStorage,
2672                    base,
2673                    value);
2674
2675                StorageAccessData storageAccessData;
2676                storageAccessData.offset = indexRelativeToBase(putByIdStatus.offset());
2677                storageAccessData.identifierNumber = identifierNumber;
2678                m_graph.m_storageAccessData.append(storageAccessData);
2679            } else {
2680                if (direct)
2681                    addToGraph(PutByIdDirect, OpInfo(identifierNumber), base, value);
2682                else
2683                    addToGraph(PutById, OpInfo(identifierNumber), base, value);
2684                canCountAsInlined = false;
2685            }
2686
2687            if (canCountAsInlined && m_graph.m_compilation)
2688                m_graph.m_compilation->noticeInlinedPutById();
2689
2690            NEXT_OPCODE(op_put_by_id);
2691        }
2692
2693        case op_init_global_const_nop: {
2694            NEXT_OPCODE(op_init_global_const_nop);
2695        }
2696
2697        case op_init_global_const: {
2698            Node* value = get(currentInstruction[2].u.operand);
2699            addToGraph(
2700                PutGlobalVar,
2701                OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2702                value);
2703            NEXT_OPCODE(op_init_global_const);
2704        }
2705
2706        case op_init_global_const_check: {
2707            Node* value = get(currentInstruction[2].u.operand);
2708            CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
2709            JSGlobalObject* globalObject = codeBlock->globalObject();
2710            unsigned identifierNumber = m_inlineStackTop->m_identifierRemap[currentInstruction[4].u.operand];
2711            Identifier identifier = m_codeBlock->identifier(identifierNumber);
2712            SymbolTableEntry entry = globalObject->symbolTable()->get(identifier.impl());
2713            if (!entry.couldBeWatched()) {
2714                addToGraph(
2715                    PutGlobalVar,
2716                    OpInfo(globalObject->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2717                    value);
2718                NEXT_OPCODE(op_init_global_const_check);
2719            }
2720            addToGraph(
2721                PutGlobalVarCheck,
2722                OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(currentInstruction[1].u.registerPointer)),
2723                OpInfo(identifierNumber),
2724                value);
2725            NEXT_OPCODE(op_init_global_const_check);
2726        }
2727
2728
2729        // === Block terminators. ===
2730
2731        case op_jmp: {
2732            unsigned relativeOffset = currentInstruction[1].u.operand;
2733            addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2734            LAST_OPCODE(op_jmp);
2735        }
2736
2737        case op_jtrue: {
2738            unsigned relativeOffset = currentInstruction[2].u.operand;
2739            Node* condition = get(currentInstruction[1].u.operand);
2740            if (canFold(condition)) {
2741                TriState state = valueOfJSConstant(condition).pureToBoolean();
2742                if (state == TrueTriState) {
2743                    addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2744                    LAST_OPCODE(op_jtrue);
2745                } else if (state == FalseTriState) {
2746                    // Emit a placeholder for this bytecode operation but otherwise
2747                    // just fall through.
2748                    addToGraph(Phantom);
2749                    NEXT_OPCODE(op_jtrue);
2750                }
2751            }
2752            addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jtrue)), condition);
2753            LAST_OPCODE(op_jtrue);
2754        }
2755
2756        case op_jfalse: {
2757            unsigned relativeOffset = currentInstruction[2].u.operand;
2758            Node* condition = get(currentInstruction[1].u.operand);
2759            if (canFold(condition)) {
2760                TriState state = valueOfJSConstant(condition).pureToBoolean();
2761                if (state == FalseTriState) {
2762                    addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2763                    LAST_OPCODE(op_jfalse);
2764                } else if (state == TrueTriState) {
2765                    // Emit a placeholder for this bytecode operation but otherwise
2766                    // just fall through.
2767                    addToGraph(Phantom);
2768                    NEXT_OPCODE(op_jfalse);
2769                }
2770            }
2771            addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jfalse)), OpInfo(m_currentIndex + relativeOffset), condition);
2772            LAST_OPCODE(op_jfalse);
2773        }
2774
2775        case op_jeq_null: {
2776            unsigned relativeOffset = currentInstruction[2].u.operand;
2777            Node* value = get(currentInstruction[1].u.operand);
2778            Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2779            addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jeq_null)), condition);
2780            LAST_OPCODE(op_jeq_null);
2781        }
2782
2783        case op_jneq_null: {
2784            unsigned relativeOffset = currentInstruction[2].u.operand;
2785            Node* value = get(currentInstruction[1].u.operand);
2786            Node* condition = addToGraph(CompareEqConstant, value, constantNull());
2787            addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_null)), OpInfo(m_currentIndex + relativeOffset), condition);
2788            LAST_OPCODE(op_jneq_null);
2789        }
2790
2791        case op_jless: {
2792            unsigned relativeOffset = currentInstruction[3].u.operand;
2793            Node* op1 = get(currentInstruction[1].u.operand);
2794            Node* op2 = get(currentInstruction[2].u.operand);
2795            if (canFold(op1) && canFold(op2)) {
2796                JSValue aValue = valueOfJSConstant(op1);
2797                JSValue bValue = valueOfJSConstant(op2);
2798                if (aValue.isNumber() && bValue.isNumber()) {
2799                    double a = aValue.asNumber();
2800                    double b = bValue.asNumber();
2801                    if (a < b) {
2802                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2803                        LAST_OPCODE(op_jless);
2804                    } else {
2805                        // Emit a placeholder for this bytecode operation but otherwise
2806                        // just fall through.
2807                        addToGraph(Phantom);
2808                        NEXT_OPCODE(op_jless);
2809                    }
2810                }
2811            }
2812            Node* condition = addToGraph(CompareLess, op1, op2);
2813            addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jless)), condition);
2814            LAST_OPCODE(op_jless);
2815        }
2816
2817        case op_jlesseq: {
2818            unsigned relativeOffset = currentInstruction[3].u.operand;
2819            Node* op1 = get(currentInstruction[1].u.operand);
2820            Node* op2 = get(currentInstruction[2].u.operand);
2821            if (canFold(op1) && canFold(op2)) {
2822                JSValue aValue = valueOfJSConstant(op1);
2823                JSValue bValue = valueOfJSConstant(op2);
2824                if (aValue.isNumber() && bValue.isNumber()) {
2825                    double a = aValue.asNumber();
2826                    double b = bValue.asNumber();
2827                    if (a <= b) {
2828                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2829                        LAST_OPCODE(op_jlesseq);
2830                    } else {
2831                        // Emit a placeholder for this bytecode operation but otherwise
2832                        // just fall through.
2833                        addToGraph(Phantom);
2834                        NEXT_OPCODE(op_jlesseq);
2835                    }
2836                }
2837            }
2838            Node* condition = addToGraph(CompareLessEq, op1, op2);
2839            addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jlesseq)), condition);
2840            LAST_OPCODE(op_jlesseq);
2841        }
2842
2843        case op_jgreater: {
2844            unsigned relativeOffset = currentInstruction[3].u.operand;
2845            Node* op1 = get(currentInstruction[1].u.operand);
2846            Node* op2 = get(currentInstruction[2].u.operand);
2847            if (canFold(op1) && canFold(op2)) {
2848                JSValue aValue = valueOfJSConstant(op1);
2849                JSValue bValue = valueOfJSConstant(op2);
2850                if (aValue.isNumber() && bValue.isNumber()) {
2851                    double a = aValue.asNumber();
2852                    double b = bValue.asNumber();
2853                    if (a > b) {
2854                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2855                        LAST_OPCODE(op_jgreater);
2856                    } else {
2857                        // Emit a placeholder for this bytecode operation but otherwise
2858                        // just fall through.
2859                        addToGraph(Phantom);
2860                        NEXT_OPCODE(op_jgreater);
2861                    }
2862                }
2863            }
2864            Node* condition = addToGraph(CompareGreater, op1, op2);
2865            addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreater)), condition);
2866            LAST_OPCODE(op_jgreater);
2867        }
2868
2869        case op_jgreatereq: {
2870            unsigned relativeOffset = currentInstruction[3].u.operand;
2871            Node* op1 = get(currentInstruction[1].u.operand);
2872            Node* op2 = get(currentInstruction[2].u.operand);
2873            if (canFold(op1) && canFold(op2)) {
2874                JSValue aValue = valueOfJSConstant(op1);
2875                JSValue bValue = valueOfJSConstant(op2);
2876                if (aValue.isNumber() && bValue.isNumber()) {
2877                    double a = aValue.asNumber();
2878                    double b = bValue.asNumber();
2879                    if (a >= b) {
2880                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2881                        LAST_OPCODE(op_jgreatereq);
2882                    } else {
2883                        // Emit a placeholder for this bytecode operation but otherwise
2884                        // just fall through.
2885                        addToGraph(Phantom);
2886                        NEXT_OPCODE(op_jgreatereq);
2887                    }
2888                }
2889            }
2890            Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2891            addToGraph(Branch, OpInfo(m_currentIndex + relativeOffset), OpInfo(m_currentIndex + OPCODE_LENGTH(op_jgreatereq)), condition);
2892            LAST_OPCODE(op_jgreatereq);
2893        }
2894
2895        case op_jnless: {
2896            unsigned relativeOffset = currentInstruction[3].u.operand;
2897            Node* op1 = get(currentInstruction[1].u.operand);
2898            Node* op2 = get(currentInstruction[2].u.operand);
2899            if (canFold(op1) && canFold(op2)) {
2900                JSValue aValue = valueOfJSConstant(op1);
2901                JSValue bValue = valueOfJSConstant(op2);
2902                if (aValue.isNumber() && bValue.isNumber()) {
2903                    double a = aValue.asNumber();
2904                    double b = bValue.asNumber();
2905                    if (a < b) {
2906                        // Emit a placeholder for this bytecode operation but otherwise
2907                        // just fall through.
2908                        addToGraph(Phantom);
2909                        NEXT_OPCODE(op_jnless);
2910                    } else {
2911                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2912                        LAST_OPCODE(op_jnless);
2913                    }
2914                }
2915            }
2916            Node* condition = addToGraph(CompareLess, op1, op2);
2917            addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnless)), OpInfo(m_currentIndex + relativeOffset), condition);
2918            LAST_OPCODE(op_jnless);
2919        }
2920
2921        case op_jnlesseq: {
2922            unsigned relativeOffset = currentInstruction[3].u.operand;
2923            Node* op1 = get(currentInstruction[1].u.operand);
2924            Node* op2 = get(currentInstruction[2].u.operand);
2925            if (canFold(op1) && canFold(op2)) {
2926                JSValue aValue = valueOfJSConstant(op1);
2927                JSValue bValue = valueOfJSConstant(op2);
2928                if (aValue.isNumber() && bValue.isNumber()) {
2929                    double a = aValue.asNumber();
2930                    double b = bValue.asNumber();
2931                    if (a <= b) {
2932                        // Emit a placeholder for this bytecode operation but otherwise
2933                        // just fall through.
2934                        addToGraph(Phantom);
2935                        NEXT_OPCODE(op_jnlesseq);
2936                    } else {
2937                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2938                        LAST_OPCODE(op_jnlesseq);
2939                    }
2940                }
2941            }
2942            Node* condition = addToGraph(CompareLessEq, op1, op2);
2943            addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jnlesseq)), OpInfo(m_currentIndex + relativeOffset), condition);
2944            LAST_OPCODE(op_jnlesseq);
2945        }
2946
2947        case op_jngreater: {
2948            unsigned relativeOffset = currentInstruction[3].u.operand;
2949            Node* op1 = get(currentInstruction[1].u.operand);
2950            Node* op2 = get(currentInstruction[2].u.operand);
2951            if (canFold(op1) && canFold(op2)) {
2952                JSValue aValue = valueOfJSConstant(op1);
2953                JSValue bValue = valueOfJSConstant(op2);
2954                if (aValue.isNumber() && bValue.isNumber()) {
2955                    double a = aValue.asNumber();
2956                    double b = bValue.asNumber();
2957                    if (a > b) {
2958                        // Emit a placeholder for this bytecode operation but otherwise
2959                        // just fall through.
2960                        addToGraph(Phantom);
2961                        NEXT_OPCODE(op_jngreater);
2962                    } else {
2963                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2964                        LAST_OPCODE(op_jngreater);
2965                    }
2966                }
2967            }
2968            Node* condition = addToGraph(CompareGreater, op1, op2);
2969            addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreater)), OpInfo(m_currentIndex + relativeOffset), condition);
2970            LAST_OPCODE(op_jngreater);
2971        }
2972
2973        case op_jngreatereq: {
2974            unsigned relativeOffset = currentInstruction[3].u.operand;
2975            Node* op1 = get(currentInstruction[1].u.operand);
2976            Node* op2 = get(currentInstruction[2].u.operand);
2977            if (canFold(op1) && canFold(op2)) {
2978                JSValue aValue = valueOfJSConstant(op1);
2979                JSValue bValue = valueOfJSConstant(op2);
2980                if (aValue.isNumber() && bValue.isNumber()) {
2981                    double a = aValue.asNumber();
2982                    double b = bValue.asNumber();
2983                    if (a >= b) {
2984                        // Emit a placeholder for this bytecode operation but otherwise
2985                        // just fall through.
2986                        addToGraph(Phantom);
2987                        NEXT_OPCODE(op_jngreatereq);
2988                    } else {
2989                        addToGraph(Jump, OpInfo(m_currentIndex + relativeOffset));
2990                        LAST_OPCODE(op_jngreatereq);
2991                    }
2992                }
2993            }
2994            Node* condition = addToGraph(CompareGreaterEq, op1, op2);
2995            addToGraph(Branch, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jngreatereq)), OpInfo(m_currentIndex + relativeOffset), condition);
2996            LAST_OPCODE(op_jngreatereq);
2997        }
2998
2999        case op_ret:
3000            flushArgumentsAndCapturedVariables();
3001            if (inlineCallFrame()) {
3002                if (m_inlineStackTop->m_returnValue != InvalidVirtualRegister)
3003                    setDirect(m_inlineStackTop->m_returnValue, get(currentInstruction[1].u.operand));
3004                m_inlineStackTop->m_didReturn = true;
3005                if (m_inlineStackTop->m_unlinkedBlocks.isEmpty()) {
3006                    // If we're returning from the first block, then we're done parsing.
3007                    ASSERT(m_inlineStackTop->m_callsiteBlockHead == m_graph.m_blocks.size() - 1);
3008                    shouldContinueParsing = false;
3009                    LAST_OPCODE(op_ret);
3010                } else {
3011                    // If inlining created blocks, and we're doing a return, then we need some
3012                    // special linking.
3013                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex == m_graph.m_blocks.size() - 1);
3014                    m_inlineStackTop->m_unlinkedBlocks.last().m_needsNormalLinking = false;
3015                }
3016                if (m_currentIndex + OPCODE_LENGTH(op_ret) != m_inlineStackTop->m_codeBlock->instructions().size() || m_inlineStackTop->m_didEarlyReturn) {
3017                    ASSERT(m_currentIndex + OPCODE_LENGTH(op_ret) <= m_inlineStackTop->m_codeBlock->instructions().size());
3018                    addToGraph(Jump, OpInfo(NoBlock));
3019                    m_inlineStackTop->m_unlinkedBlocks.last().m_needsEarlyReturnLinking = true;
3020                    m_inlineStackTop->m_didEarlyReturn = true;
3021                }
3022                LAST_OPCODE(op_ret);
3023            }
3024            addToGraph(Return, get(currentInstruction[1].u.operand));
3025            LAST_OPCODE(op_ret);
3026
3027        case op_end:
3028            flushArgumentsAndCapturedVariables();
3029            ASSERT(!inlineCallFrame());
3030            addToGraph(Return, get(currentInstruction[1].u.operand));
3031            LAST_OPCODE(op_end);
3032
3033        case op_throw:
3034            addToGraph(Throw, get(currentInstruction[1].u.operand));
3035            flushAllArgumentsAndCapturedVariablesInInlineStack();
3036            addToGraph(Unreachable);
3037            LAST_OPCODE(op_throw);
3038
3039        case op_throw_static_error:
3040            addToGraph(ThrowReferenceError);
3041            flushAllArgumentsAndCapturedVariablesInInlineStack();
3042            addToGraph(Unreachable);
3043            LAST_OPCODE(op_throw_static_error);
3044
3045        case op_call:
3046            handleCall(interpreter, currentInstruction, Call, CodeForCall);
3047            NEXT_OPCODE(op_call);
3048
3049        case op_construct:
3050            handleCall(interpreter, currentInstruction, Construct, CodeForConstruct);
3051            NEXT_OPCODE(op_construct);
3052
3053        case op_call_varargs: {
3054            ASSERT(inlineCallFrame());
3055            ASSERT(currentInstruction[3].u.operand == m_inlineStackTop->m_codeBlock->argumentsRegister());
3056            ASSERT(!m_inlineStackTop->m_codeBlock->symbolTable()->slowArguments());
3057            // It would be cool to funnel this into handleCall() so that it can handle
3058            // inlining. But currently that won't be profitable anyway, since none of the
3059            // uses of call_varargs will be inlineable. So we set this up manually and
3060            // without inline/intrinsic detection.
3061
3062            Instruction* putInstruction = currentInstruction + OPCODE_LENGTH(op_call_varargs);
3063
3064            SpeculatedType prediction = SpecNone;
3065            if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result) {
3066                m_currentProfilingIndex = m_currentIndex + OPCODE_LENGTH(op_call_varargs);
3067                prediction = getPrediction();
3068            }
3069
3070            addToGraph(CheckArgumentsNotCreated);
3071
3072            unsigned argCount = inlineCallFrame()->arguments.size();
3073            if (JSStack::CallFrameHeaderSize + argCount > m_parameterSlots)
3074                m_parameterSlots = JSStack::CallFrameHeaderSize + argCount;
3075
3076            addVarArgChild(get(currentInstruction[1].u.operand)); // callee
3077            addVarArgChild(get(currentInstruction[2].u.operand)); // this
3078            for (unsigned argument = 1; argument < argCount; ++argument)
3079                addVarArgChild(get(argumentToOperand(argument)));
3080
3081            Node* call = addToGraph(Node::VarArg, Call, OpInfo(0), OpInfo(prediction));
3082            if (interpreter->getOpcodeID(putInstruction->u.opcode) == op_call_put_result)
3083                set(putInstruction[1].u.operand, call);
3084
3085            NEXT_OPCODE(op_call_varargs);
3086        }
3087
3088        case op_call_put_result:
3089            NEXT_OPCODE(op_call_put_result);
3090
3091        case op_jneq_ptr:
3092            // Statically speculate for now. It makes sense to let speculate-only jneq_ptr
3093            // support simmer for a while before making it more general, since it's
3094            // already gnarly enough as it is.
3095            ASSERT(pointerIsFunction(currentInstruction[2].u.specialPointer));
3096            addToGraph(
3097                CheckFunction,
3098                OpInfo(actualPointerFor(m_inlineStackTop->m_codeBlock, currentInstruction[2].u.specialPointer)),
3099                get(currentInstruction[1].u.operand));
3100            addToGraph(Jump, OpInfo(m_currentIndex + OPCODE_LENGTH(op_jneq_ptr)));
3101            LAST_OPCODE(op_jneq_ptr);
3102
3103        case op_get_scoped_var: {
3104            SpeculatedType prediction = getPrediction();
3105            int dst = currentInstruction[1].u.operand;
3106            int slot = currentInstruction[2].u.operand;
3107            int depth = currentInstruction[3].u.operand;
3108            bool hasTopScope = m_codeBlock->codeType() == FunctionCode && m_inlineStackTop->m_codeBlock->needsFullScopeChain();
3109            ASSERT(!hasTopScope || depth >= 1);
3110            Node* scope = getScope(hasTopScope, depth - hasTopScope);
3111            Node* getScopeRegisters = addToGraph(GetScopeRegisters, scope);
3112            Node* getScopedVar = addToGraph(GetScopedVar, OpInfo(slot), OpInfo(prediction), getScopeRegisters);
3113            set(dst, getScopedVar);
3114            NEXT_OPCODE(op_get_scoped_var);
3115        }
3116
3117        case op_put_scoped_var: {
3118            int slot = currentInstruction[1].u.operand;
3119            int depth = currentInstruction[2].u.operand;
3120            int source = currentInstruction[3].u.operand;
3121            bool hasTopScope = m_codeBlock->codeType() == FunctionCode && m_inlineStackTop->m_codeBlock->needsFullScopeChain();
3122            ASSERT(!hasTopScope || depth >= 1);
3123            Node* scope = getScope(hasTopScope, depth - hasTopScope);
3124            Node* scopeRegisters = addToGraph(GetScopeRegisters, scope);
3125            addToGraph(PutScopedVar, OpInfo(slot), scope, scopeRegisters, get(source));
3126            NEXT_OPCODE(op_put_scoped_var);
3127        }
3128
3129        case op_resolve:
3130        case op_resolve_global_property:
3131        case op_resolve_global_var:
3132        case op_resolve_scoped_var:
3133        case op_resolve_scoped_var_on_top_scope:
3134        case op_resolve_scoped_var_with_top_scope_check: {
3135            SpeculatedType prediction = getPrediction();
3136
3137            unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3138            ResolveOperations* operations = currentInstruction[3].u.resolveOperations;
3139            Node* value = 0;
3140            if (parseResolveOperations(prediction, identifier, operations, 0, 0, &value)) {
3141                set(currentInstruction[1].u.operand, value);
3142                NEXT_OPCODE(op_resolve);
3143            }
3144
3145            Node* resolve = addToGraph(Resolve, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
3146            m_graph.m_resolveOperationsData.append(ResolveOperationData());
3147            ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
3148            data.identifierNumber = identifier;
3149            data.resolveOperations = operations;
3150
3151            set(currentInstruction[1].u.operand, resolve);
3152
3153            NEXT_OPCODE(op_resolve);
3154        }
3155
3156        case op_put_to_base_variable:
3157        case op_put_to_base: {
3158            unsigned base = currentInstruction[1].u.operand;
3159            unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3160            unsigned value = currentInstruction[3].u.operand;
3161            PutToBaseOperation* putToBase = currentInstruction[4].u.putToBaseOperation;
3162
3163            if (putToBase->m_isDynamic) {
3164                addToGraph(PutById, OpInfo(identifier), get(base), get(value));
3165                NEXT_OPCODE(op_put_to_base);
3166            }
3167
3168            switch (putToBase->m_kind) {
3169            case PutToBaseOperation::Uninitialised:
3170                addToGraph(ForceOSRExit);
3171                addToGraph(Phantom, get(base));
3172                addToGraph(Phantom, get(value));
3173                break;
3174
3175            case PutToBaseOperation::GlobalVariablePutChecked: {
3176                CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
3177                JSGlobalObject* globalObject = codeBlock->globalObject();
3178                SymbolTableEntry entry = globalObject->symbolTable()->get(m_codeBlock->identifier(identifier).impl());
3179                if (entry.couldBeWatched()) {
3180                    addToGraph(PutGlobalVarCheck,
3181                               OpInfo(codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
3182                               OpInfo(identifier),
3183                               get(value));
3184                    break;
3185                }
3186            }
3187            case PutToBaseOperation::GlobalVariablePut:
3188                addToGraph(PutGlobalVar,
3189                           OpInfo(m_inlineStackTop->m_codeBlock->globalObject()->assertRegisterIsInThisObject(putToBase->m_registerAddress)),
3190                           get(value));
3191                break;
3192            case PutToBaseOperation::VariablePut: {
3193                Node* scope = get(base);
3194                Node* scopeRegisters = addToGraph(GetScopeRegisters, scope);
3195                addToGraph(PutScopedVar, OpInfo(putToBase->m_offset), scope, scopeRegisters, get(value));
3196                break;
3197            }
3198            case PutToBaseOperation::GlobalPropertyPut: {
3199                if (!putToBase->m_structure) {
3200                    addToGraph(ForceOSRExit);
3201                    addToGraph(Phantom, get(base));
3202                    addToGraph(Phantom, get(value));
3203                    NEXT_OPCODE(op_put_to_base);
3204                }
3205                Node* baseNode = get(base);
3206                addToGraph(CheckStructure, OpInfo(m_graph.addStructureSet(putToBase->m_structure.get())), baseNode);
3207                Node* propertyStorage;
3208                if (isInlineOffset(putToBase->m_offset))
3209                    propertyStorage = baseNode;
3210                else
3211                    propertyStorage = addToGraph(GetButterfly, baseNode);
3212                addToGraph(PutByOffset, OpInfo(m_graph.m_storageAccessData.size()), propertyStorage, baseNode, get(value));
3213
3214                StorageAccessData storageAccessData;
3215                storageAccessData.offset = indexRelativeToBase(putToBase->m_offset);
3216                storageAccessData.identifierNumber = identifier;
3217                m_graph.m_storageAccessData.append(storageAccessData);
3218                break;
3219            }
3220            case PutToBaseOperation::Readonly:
3221            case PutToBaseOperation::Generic:
3222                addToGraph(PutById, OpInfo(identifier), get(base), get(value));
3223            }
3224            NEXT_OPCODE(op_put_to_base);
3225        }
3226
3227        case op_resolve_base_to_global:
3228        case op_resolve_base_to_global_dynamic:
3229        case op_resolve_base_to_scope:
3230        case op_resolve_base_to_scope_with_top_scope_check:
3231        case op_resolve_base: {
3232            SpeculatedType prediction = getPrediction();
3233
3234            unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[2].u.operand];
3235            ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
3236            PutToBaseOperation* putToBaseOperation = currentInstruction[5].u.putToBaseOperation;
3237
3238            Node* base = 0;
3239            if (parseResolveOperations(prediction, identifier, operations, 0, &base, 0)) {
3240                set(currentInstruction[1].u.operand, base);
3241                NEXT_OPCODE(op_resolve_base);
3242            }
3243
3244            Node* resolve = addToGraph(currentInstruction[3].u.operand ? ResolveBaseStrictPut : ResolveBase, OpInfo(m_graph.m_resolveOperationsData.size()), OpInfo(prediction));
3245            m_graph.m_resolveOperationsData.append(ResolveOperationData());
3246            ResolveOperationData& data = m_graph.m_resolveOperationsData.last();
3247            data.identifierNumber = identifier;
3248            data.resolveOperations = operations;
3249            data.putToBaseOperation = putToBaseOperation;
3250
3251            set(currentInstruction[1].u.operand, resolve);
3252
3253            NEXT_OPCODE(op_resolve_base);
3254        }
3255        case op_resolve_with_base: {
3256            SpeculatedType prediction = getPrediction();
3257            unsigned baseDst = currentInstruction[1].u.operand;
3258            unsigned valueDst = currentInstruction[2].u.operand;
3259            unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3260            ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
3261            PutToBaseOperation* putToBaseOperation = currentInstruction[5].u.putToBaseOperation;
3262
3263            Node* base = 0;
3264            Node* value = 0;
3265            if (parseResolveOperations(prediction, identifier, operations, putToBaseOperation, &base, &value))
3266                setPair(baseDst, base, valueDst, value);
3267            else {
3268                addToGraph(ForceOSRExit);
3269                setPair(baseDst, addToGraph(GarbageValue), valueDst, addToGraph(GarbageValue));
3270            }
3271
3272            NEXT_OPCODE(op_resolve_with_base);
3273        }
3274        case op_resolve_with_this: {
3275            SpeculatedType prediction = getPrediction();
3276            unsigned baseDst = currentInstruction[1].u.operand;
3277            unsigned valueDst = currentInstruction[2].u.operand;
3278            unsigned identifier = m_inlineStackTop->m_identifierRemap[currentInstruction[3].u.operand];
3279            ResolveOperations* operations = currentInstruction[4].u.resolveOperations;
3280
3281            Node* base = 0;
3282            Node* value = 0;
3283            if (parseResolveOperations(prediction, identifier, operations, 0, &base, &value))
3284                setPair(baseDst, base, valueDst, value);
3285            else {
3286                addToGraph(ForceOSRExit);
3287                setPair(baseDst, addToGraph(GarbageValue), valueDst, addToGraph(GarbageValue));
3288            }
3289
3290            NEXT_OPCODE(op_resolve_with_this);
3291        }
3292        case op_loop_hint: {
3293            // Baseline->DFG OSR jumps between loop hints. The DFG assumes that Baseline->DFG
3294            // OSR can only happen at basic block boundaries. Assert that these two statements
3295            // are compatible.
3296            RELEASE_ASSERT(m_currentIndex == blockBegin);
3297
3298            // We never do OSR into an inlined code block. That could not happen, since OSR
3299            // looks up the code block that is the replacement for the baseline JIT code
3300            // block. Hence, machine code block = true code block = not inline code block.
3301            if (!m_inlineStackTop->m_caller)
3302                m_currentBlock->isOSRTarget = true;
3303
3304            if (m_vm->watchdog.isEnabled())
3305                addToGraph(CheckWatchdogTimer);
3306            else {
3307                // Emit a phantom node to ensure that there is a placeholder
3308                // node for this bytecode op.
3309                addToGraph(Phantom);
3310            }
3311
3312            NEXT_OPCODE(op_loop_hint);
3313        }
3314
3315        case op_init_lazy_reg: {
3316            set(currentInstruction[1].u.operand, getJSConstantForValue(JSValue()));
3317            NEXT_OPCODE(op_init_lazy_reg);
3318        }
3319
3320        case op_create_activation: {
3321            set(currentInstruction[1].u.operand, addToGraph(CreateActivation, get(currentInstruction[1].u.operand)));
3322            NEXT_OPCODE(op_create_activation);
3323        }
3324
3325        case op_create_arguments: {
3326            m_graph.m_hasArguments = true;
3327            Node* createArguments = addToGraph(CreateArguments, get(currentInstruction[1].u.operand));
3328            set(currentInstruction[1].u.operand, createArguments);
3329            set(unmodifiedArgumentsRegister(currentInstruction[1].u.operand), createArguments);
3330            NEXT_OPCODE(op_create_arguments);
3331        }
3332
3333        case op_tear_off_activation: {
3334            addToGraph(TearOffActivation, get(currentInstruction[1].u.operand));
3335            NEXT_OPCODE(op_tear_off_activation);
3336        }
3337
3338        case op_tear_off_arguments: {
3339            m_graph.m_hasArguments = true;
3340            addToGraph(TearOffArguments, get(unmodifiedArgumentsRegister(currentInstruction[1].u.operand)), get(currentInstruction[2].u.operand));
3341            NEXT_OPCODE(op_tear_off_arguments);
3342        }
3343
3344        case op_get_arguments_length: {
3345            m_graph.m_hasArguments = true;
3346            set(currentInstruction[1].u.operand, addToGraph(GetMyArgumentsLengthSafe));
3347            NEXT_OPCODE(op_get_arguments_length);
3348        }
3349
3350        case op_get_argument_by_val: {
3351            m_graph.m_hasArguments = true;
3352            set(currentInstruction[1].u.operand,
3353                addToGraph(
3354                    GetMyArgumentByValSafe, OpInfo(0), OpInfo(getPrediction()),
3355                    get(currentInstruction[3].u.operand)));
3356            NEXT_OPCODE(op_get_argument_by_val);
3357        }
3358
3359        case op_new_func: {
3360            if (!currentInstruction[3].u.operand) {
3361                set(currentInstruction[1].u.operand,
3362                    addToGraph(NewFunctionNoCheck, OpInfo(currentInstruction[2].u.operand)));
3363            } else {
3364                set(currentInstruction[1].u.operand,
3365                    addToGraph(
3366                        NewFunction,
3367                        OpInfo(currentInstruction[2].u.operand),
3368                        get(currentInstruction[1].u.operand)));
3369            }
3370            NEXT_OPCODE(op_new_func);
3371        }
3372
3373        case op_new_func_exp: {
3374            set(currentInstruction[1].u.operand,
3375                addToGraph(NewFunctionExpression, OpInfo(currentInstruction[2].u.operand)));
3376            NEXT_OPCODE(op_new_func_exp);
3377        }
3378
3379        case op_typeof: {
3380            set(currentInstruction[1].u.operand,
3381                addToGraph(TypeOf, get(currentInstruction[2].u.operand)));
3382            NEXT_OPCODE(op_typeof);
3383        }
3384
3385        case op_to_number: {
3386            set(currentInstruction[1].u.operand,
3387                addToGraph(Identity, Edge(get(currentInstruction[2].u.operand), NumberUse)));
3388            NEXT_OPCODE(op_to_number);
3389        }
3390
3391        default:
3392            // Parse failed! This should not happen because the capabilities checker
3393            // should have caught it.
3394            RELEASE_ASSERT_NOT_REACHED();
3395            return false;
3396        }
3397    }
3398}
3399
3400void ByteCodeParser::linkBlock(BasicBlock* block, Vector<BlockIndex>& possibleTargets)
3401{
3402    ASSERT(!block->isLinked);
3403    ASSERT(!block->isEmpty());
3404    Node* node = block->last();
3405    ASSERT(node->isTerminal());
3406
3407    switch (node->op()) {
3408    case Jump:
3409        node->setTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3410#if DFG_ENABLE(DEBUG_VERBOSE)
3411        dataLogF("Linked basic block %p to %p, #%u.\n", block, m_graph.m_blocks[node->takenBlockIndex()].get(), node->takenBlockIndex());
3412#endif
3413        break;
3414
3415    case Branch:
3416        node->setTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node->takenBytecodeOffsetDuringParsing()));
3417        node->setNotTakenBlockIndex(m_graph.blockIndexForBytecodeOffset(possibleTargets, node->notTakenBytecodeOffsetDuringParsing()));
3418#if DFG_ENABLE(DEBUG_VERBOSE)
3419        dataLogF("Linked basic block %p to %p, #%u and %p, #%u.\n", block, m_graph.m_blocks[node->takenBlockIndex()].get(), node->takenBlockIndex(), m_graph.m_blocks[node->notTakenBlockIndex()].get(), node->notTakenBlockIndex());
3420#endif
3421        break;
3422
3423    default:
3424#if DFG_ENABLE(DEBUG_VERBOSE)
3425        dataLogF("Marking basic block %p as linked.\n", block);
3426#endif
3427        break;
3428    }
3429
3430#if !ASSERT_DISABLED
3431    block->isLinked = true;
3432#endif
3433}
3434
3435void ByteCodeParser::linkBlocks(Vector<UnlinkedBlock>& unlinkedBlocks, Vector<BlockIndex>& possibleTargets)
3436{
3437    for (size_t i = 0; i < unlinkedBlocks.size(); ++i) {
3438        if (unlinkedBlocks[i].m_needsNormalLinking) {
3439            linkBlock(m_graph.m_blocks[unlinkedBlocks[i].m_blockIndex].get(), possibleTargets);
3440            unlinkedBlocks[i].m_needsNormalLinking = false;
3441        }
3442    }
3443}
3444
3445void ByteCodeParser::buildOperandMapsIfNecessary()
3446{
3447    if (m_haveBuiltOperandMaps)
3448        return;
3449
3450    for (size_t i = 0; i < m_codeBlock->numberOfIdentifiers(); ++i)
3451        m_identifierMap.add(m_codeBlock->identifier(i).impl(), i);
3452    for (size_t i = 0; i < m_codeBlock->numberOfConstantRegisters(); ++i) {
3453        JSValue value = m_codeBlock->getConstant(i + FirstConstantRegisterIndex);
3454        if (!value)
3455            m_emptyJSValueIndex = i + FirstConstantRegisterIndex;
3456        else
3457            m_jsValueMap.add(JSValue::encode(value), i + FirstConstantRegisterIndex);
3458    }
3459
3460    m_haveBuiltOperandMaps = true;
3461}
3462
3463ByteCodeParser::InlineStackEntry::InlineStackEntry(
3464    ByteCodeParser* byteCodeParser,
3465    CodeBlock* codeBlock,
3466    CodeBlock* profiledBlock,
3467    BlockIndex callsiteBlockHead,
3468    JSFunction* callee, // Null if this is a closure call.
3469    VirtualRegister returnValueVR,
3470    VirtualRegister inlineCallFrameStart,
3471    int argumentCountIncludingThis,
3472    CodeSpecializationKind kind)
3473    : m_byteCodeParser(byteCodeParser)
3474    , m_codeBlock(codeBlock)
3475    , m_profiledBlock(profiledBlock)
3476    , m_exitProfile(profiledBlock->exitProfile())
3477    , m_callsiteBlockHead(callsiteBlockHead)
3478    , m_returnValue(returnValueVR)
3479    , m_lazyOperands(profiledBlock->lazyOperandValueProfiles())
3480    , m_didReturn(false)
3481    , m_didEarlyReturn(false)
3482    , m_caller(byteCodeParser->m_inlineStackTop)
3483{
3484    m_argumentPositions.resize(argumentCountIncludingThis);
3485    for (int i = 0; i < argumentCountIncludingThis; ++i) {
3486        byteCodeParser->m_graph.m_argumentPositions.append(ArgumentPosition());
3487        ArgumentPosition* argumentPosition = &byteCodeParser->m_graph.m_argumentPositions.last();
3488        m_argumentPositions[i] = argumentPosition;
3489    }
3490
3491    // Track the code-block-global exit sites.
3492    if (m_exitProfile.hasExitSite(ArgumentsEscaped)) {
3493        byteCodeParser->m_graph.m_executablesWhoseArgumentsEscaped.add(
3494            codeBlock->ownerExecutable());
3495    }
3496
3497    if (m_caller) {
3498        // Inline case.
3499        ASSERT(codeBlock != byteCodeParser->m_codeBlock);
3500        ASSERT(inlineCallFrameStart != InvalidVirtualRegister);
3501        ASSERT(callsiteBlockHead != NoBlock);
3502
3503        InlineCallFrame inlineCallFrame;
3504        inlineCallFrame.executable.set(*byteCodeParser->m_vm, byteCodeParser->m_codeBlock->ownerExecutable(), codeBlock->ownerExecutable());
3505        inlineCallFrame.stackOffset = inlineCallFrameStart + JSStack::CallFrameHeaderSize;
3506        if (callee)
3507            inlineCallFrame.callee.set(*byteCodeParser->m_vm, byteCodeParser->m_codeBlock->ownerExecutable(), callee);
3508        inlineCallFrame.caller = byteCodeParser->currentCodeOrigin();
3509        inlineCallFrame.arguments.resize(argumentCountIncludingThis); // Set the number of arguments including this, but don't configure the value recoveries, yet.
3510        inlineCallFrame.isCall = isCall(kind);
3511
3512        if (inlineCallFrame.caller.inlineCallFrame)
3513            inlineCallFrame.capturedVars = inlineCallFrame.caller.inlineCallFrame->capturedVars;
3514        else {
3515            for (int i = byteCodeParser->m_codeBlock->m_numVars; i--;) {
3516                if (byteCodeParser->m_codeBlock->isCaptured(i))
3517                    inlineCallFrame.capturedVars.set(i);
3518            }
3519        }
3520
3521        for (int i = argumentCountIncludingThis; i--;) {
3522            if (codeBlock->isCaptured(argumentToOperand(i)))
3523                inlineCallFrame.capturedVars.set(argumentToOperand(i) + inlineCallFrame.stackOffset);
3524        }
3525        for (size_t i = codeBlock->m_numVars; i--;) {
3526            if (codeBlock->isCaptured(i))
3527                inlineCallFrame.capturedVars.set(i + inlineCallFrame.stackOffset);
3528        }
3529
3530#if DFG_ENABLE(DEBUG_VERBOSE)
3531        dataLogF("Current captured variables: ");
3532        inlineCallFrame.capturedVars.dump(WTF::dataFile());
3533        dataLogF("\n");
3534#endif
3535
3536        byteCodeParser->m_codeBlock->inlineCallFrames().append(inlineCallFrame);
3537        m_inlineCallFrame = &byteCodeParser->m_codeBlock->inlineCallFrames().last();
3538
3539        byteCodeParser->buildOperandMapsIfNecessary();
3540
3541        m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3542        m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3543        m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3544
3545        for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i) {
3546            StringImpl* rep = codeBlock->identifier(i).impl();
3547            IdentifierMap::AddResult result = byteCodeParser->m_identifierMap.add(rep, byteCodeParser->m_codeBlock->numberOfIdentifiers());
3548            if (result.isNewEntry)
3549                byteCodeParser->m_codeBlock->addIdentifier(Identifier(byteCodeParser->m_vm, rep));
3550            m_identifierRemap[i] = result.iterator->value;
3551        }
3552        for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i) {
3553            JSValue value = codeBlock->getConstant(i + FirstConstantRegisterIndex);
3554            if (!value) {
3555                if (byteCodeParser->m_emptyJSValueIndex == UINT_MAX) {
3556                    byteCodeParser->m_emptyJSValueIndex = byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex;
3557                    byteCodeParser->m_codeBlock->addConstant(JSValue());
3558                    byteCodeParser->m_constants.append(ConstantRecord());
3559                }
3560                m_constantRemap[i] = byteCodeParser->m_emptyJSValueIndex;
3561                continue;
3562            }
3563            JSValueMap::AddResult result = byteCodeParser->m_jsValueMap.add(JSValue::encode(value), byteCodeParser->m_codeBlock->numberOfConstantRegisters() + FirstConstantRegisterIndex);
3564            if (result.isNewEntry) {
3565                byteCodeParser->m_codeBlock->addConstant(value);
3566                byteCodeParser->m_constants.append(ConstantRecord());
3567            }
3568            m_constantRemap[i] = result.iterator->value;
3569        }
3570        for (unsigned i = 0; i < codeBlock->numberOfConstantBuffers(); ++i) {
3571            // If we inline the same code block multiple times, we don't want to needlessly
3572            // duplicate its constant buffers.
3573            HashMap<ConstantBufferKey, unsigned>::iterator iter =
3574                byteCodeParser->m_constantBufferCache.find(ConstantBufferKey(codeBlock, i));
3575            if (iter != byteCodeParser->m_constantBufferCache.end()) {
3576                m_constantBufferRemap[i] = iter->value;
3577                continue;
3578            }
3579            Vector<JSValue>& buffer = codeBlock->constantBufferAsVector(i);
3580            unsigned newIndex = byteCodeParser->m_codeBlock->addConstantBuffer(buffer);
3581            m_constantBufferRemap[i] = newIndex;
3582            byteCodeParser->m_constantBufferCache.add(ConstantBufferKey(codeBlock, i), newIndex);
3583        }
3584        m_callsiteBlockHeadNeedsLinking = true;
3585    } else {
3586        // Machine code block case.
3587        ASSERT(codeBlock == byteCodeParser->m_codeBlock);
3588        ASSERT(!callee);
3589        ASSERT(returnValueVR == InvalidVirtualRegister);
3590        ASSERT(inlineCallFrameStart == InvalidVirtualRegister);
3591        ASSERT(callsiteBlockHead == NoBlock);
3592
3593        m_inlineCallFrame = 0;
3594
3595        m_identifierRemap.resize(codeBlock->numberOfIdentifiers());
3596        m_constantRemap.resize(codeBlock->numberOfConstantRegisters());
3597        m_constantBufferRemap.resize(codeBlock->numberOfConstantBuffers());
3598        for (size_t i = 0; i < codeBlock->numberOfIdentifiers(); ++i)
3599            m_identifierRemap[i] = i;
3600        for (size_t i = 0; i < codeBlock->numberOfConstantRegisters(); ++i)
3601            m_constantRemap[i] = i + FirstConstantRegisterIndex;
3602        for (size_t i = 0; i < codeBlock->numberOfConstantBuffers(); ++i)
3603            m_constantBufferRemap[i] = i;
3604        m_callsiteBlockHeadNeedsLinking = false;
3605    }
3606
3607    for (size_t i = 0; i < m_constantRemap.size(); ++i)
3608        ASSERT(m_constantRemap[i] >= static_cast<unsigned>(FirstConstantRegisterIndex));
3609
3610    byteCodeParser->m_inlineStackTop = this;
3611}
3612
3613void ByteCodeParser::parseCodeBlock()
3614{
3615    CodeBlock* codeBlock = m_inlineStackTop->m_codeBlock;
3616
3617    if (m_graph.m_compilation) {
3618        m_graph.m_compilation->addProfiledBytecodes(
3619            *m_vm->m_perBytecodeProfiler, m_inlineStackTop->m_profiledBlock);
3620    }
3621
3622    bool shouldDumpBytecode = Options::dumpBytecodeAtDFGTime();
3623#if DFG_ENABLE(DEBUG_VERBOSE)
3624    shouldDumpBytecode |= true;
3625#endif
3626    if (shouldDumpBytecode) {
3627        dataLog("Parsing ", *codeBlock);
3628        if (inlineCallFrame()) {
3629            dataLog(
3630                " for inlining at ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT),
3631                " ", inlineCallFrame()->caller);
3632        }
3633        dataLog(
3634            ": captureCount = ", codeBlock->symbolTable() ? codeBlock->symbolTable()->captureCount() : 0,
3635            ", needsFullScopeChain = ", codeBlock->needsFullScopeChain(),
3636            ", needsActivation = ", codeBlock->ownerExecutable()->needsActivation(),
3637            ", isStrictMode = ", codeBlock->ownerExecutable()->isStrictMode(), "\n");
3638        codeBlock->baselineVersion()->dumpBytecode();
3639    }
3640
3641    Vector<unsigned, 32> jumpTargets;
3642    computePreciseJumpTargets(codeBlock, jumpTargets);
3643    if (Options::dumpBytecodeAtDFGTime()) {
3644        dataLog("Jump targets: ");
3645        CommaPrinter comma;
3646        for (unsigned i = 0; i < jumpTargets.size(); ++i)
3647            dataLog(comma, jumpTargets[i]);
3648        dataLog("\n");
3649    }
3650
3651    for (unsigned jumpTargetIndex = 0; jumpTargetIndex <= jumpTargets.size(); ++jumpTargetIndex) {
3652        // The maximum bytecode offset to go into the current basicblock is either the next jump target, or the end of the instructions.
3653        unsigned limit = jumpTargetIndex < jumpTargets.size() ? jumpTargets[jumpTargetIndex] : codeBlock->instructions().size();
3654#if DFG_ENABLE(DEBUG_VERBOSE)
3655        dataLog(
3656            "Parsing bytecode with limit ", pointerDump(inlineCallFrame()),
3657            " bc#", limit, " at inline depth ",
3658            CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()), ".\n");
3659#endif
3660        ASSERT(m_currentIndex < limit);
3661
3662        // Loop until we reach the current limit (i.e. next jump target).
3663        do {
3664            if (!m_currentBlock) {
3665                // Check if we can use the last block.
3666                if (!m_graph.m_blocks.isEmpty() && m_graph.m_blocks.last()->isEmpty()) {
3667                    // This must be a block belonging to us.
3668                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex == m_graph.m_blocks.size() - 1);
3669                    // Either the block is linkable or it isn't. If it's linkable then it's the last
3670                    // block in the blockLinkingTargets list. If it's not then the last block will
3671                    // have a lower bytecode index that the one we're about to give to this block.
3672                    if (m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_blockLinkingTargets.last()]->bytecodeBegin != m_currentIndex) {
3673                        // Make the block linkable.
3674                        ASSERT(m_inlineStackTop->m_blockLinkingTargets.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_blockLinkingTargets.last()]->bytecodeBegin < m_currentIndex);
3675                        m_inlineStackTop->m_blockLinkingTargets.append(m_graph.m_blocks.size() - 1);
3676                    }
3677                    // Change its bytecode begin and continue.
3678                    m_currentBlock = m_graph.m_blocks.last().get();
3679#if DFG_ENABLE(DEBUG_VERBOSE)
3680                    dataLogF("Reascribing bytecode index of block %p from bc#%u to bc#%u (peephole case).\n", m_currentBlock, m_currentBlock->bytecodeBegin, m_currentIndex);
3681#endif
3682                    m_currentBlock->bytecodeBegin = m_currentIndex;
3683                } else {
3684                    OwnPtr<BasicBlock> block = adoptPtr(new BasicBlock(m_currentIndex, m_numArguments, m_numLocals));
3685#if DFG_ENABLE(DEBUG_VERBOSE)
3686                    dataLogF("Creating basic block %p, #%zu for %p bc#%u at inline depth %u.\n", block.get(), m_graph.m_blocks.size(), m_inlineStackTop->executable(), m_currentIndex, CodeOrigin::inlineDepthForCallFrame(inlineCallFrame()));
3687#endif
3688                    m_currentBlock = block.get();
3689                    // This assertion checks two things:
3690                    // 1) If the bytecodeBegin is greater than currentIndex, then something has gone
3691                    //    horribly wrong. So, we're probably generating incorrect code.
3692                    // 2) If the bytecodeBegin is equal to the currentIndex, then we failed to do
3693                    //    a peephole coalescing of this block in the if statement above. So, we're
3694                    //    generating suboptimal code and leaving more work for the CFG simplifier.
3695                    ASSERT(m_inlineStackTop->m_unlinkedBlocks.isEmpty() || m_graph.m_blocks[m_inlineStackTop->m_unlinkedBlocks.last().m_blockIndex]->bytecodeBegin < m_currentIndex);
3696                    m_inlineStackTop->m_unlinkedBlocks.append(UnlinkedBlock(m_graph.m_blocks.size()));
3697                    m_inlineStackTop->m_blockLinkingTargets.append(m_graph.m_blocks.size());
3698                    // The first block is definitely an OSR target.
3699                    if (!m_graph.m_blocks.size())
3700                        block->isOSRTarget = true;
3701                    m_graph.m_blocks.append(block.release());
3702                    prepareToParseBlock();
3703                }
3704            }
3705
3706            bool shouldContinueParsing = parseBlock(limit);
3707
3708            // We should not have gone beyond the limit.
3709            ASSERT(m_currentIndex <= limit);
3710
3711            // We should have planted a terminal, or we just gave up because
3712            // we realized that the jump target information is imprecise, or we
3713            // are at the end of an inline function, or we realized that we
3714            // should stop parsing because there was a return in the first
3715            // basic block.
3716            ASSERT(m_currentBlock->isEmpty() || m_currentBlock->last()->isTerminal() || (m_currentIndex == codeBlock->instructions().size() && inlineCallFrame()) || !shouldContinueParsing);
3717
3718            if (!shouldContinueParsing)
3719                return;
3720
3721            m_currentBlock = 0;
3722        } while (m_currentIndex < limit);
3723    }
3724
3725    // Should have reached the end of the instructions.
3726    ASSERT(m_currentIndex == codeBlock->instructions().size());
3727}
3728
3729bool ByteCodeParser::parse()
3730{
3731    // Set during construction.
3732    ASSERT(!m_currentIndex);
3733
3734#if DFG_ENABLE(ALL_VARIABLES_CAPTURED)
3735    // We should be pretending that the code has an activation.
3736    ASSERT(m_graph.needsActivation());
3737#endif
3738
3739    InlineStackEntry inlineStackEntry(
3740        this, m_codeBlock, m_profiledBlock, NoBlock, 0, InvalidVirtualRegister, InvalidVirtualRegister,
3741        m_codeBlock->numParameters(), CodeForCall);
3742
3743    parseCodeBlock();
3744
3745    linkBlocks(inlineStackEntry.m_unlinkedBlocks, inlineStackEntry.m_blockLinkingTargets);
3746    m_graph.determineReachability();
3747
3748    ASSERT(m_preservedVars.size());
3749    size_t numberOfLocals = 0;
3750    for (size_t i = m_preservedVars.size(); i--;) {
3751        if (m_preservedVars.quickGet(i)) {
3752            numberOfLocals = i + 1;
3753            break;
3754        }
3755    }
3756
3757    for (BlockIndex blockIndex = 0; blockIndex < m_graph.m_blocks.size(); ++blockIndex) {
3758        BasicBlock* block = m_graph.m_blocks[blockIndex].get();
3759        ASSERT(block);
3760        if (!block->isReachable) {
3761            m_graph.m_blocks[blockIndex].clear();
3762            continue;
3763        }
3764
3765        block->variablesAtHead.ensureLocals(numberOfLocals);
3766        block->variablesAtTail.ensureLocals(numberOfLocals);
3767    }
3768
3769    m_graph.m_preservedVars = m_preservedVars;
3770    m_graph.m_localVars = m_numLocals;
3771    m_graph.m_parameterSlots = m_parameterSlots;
3772
3773    return true;
3774}
3775
3776bool parse(ExecState*, Graph& graph)
3777{
3778    SamplingRegion samplingRegion("DFG Parsing");
3779#if DFG_DEBUG_LOCAL_DISBALE
3780    UNUSED_PARAM(exec);
3781    UNUSED_PARAM(graph);
3782    return false;
3783#else
3784    return ByteCodeParser(graph).parse();
3785#endif
3786}
3787
3788} } // namespace JSC::DFG
3789
3790#endif
3791