/*
 * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "FTLSlowPathCall.h"

#if ENABLE(FTL_JIT)

#include "CCallHelpers.h"
#include "FTLState.h"
#include "GPRInfo.h"
#include "JSCInlines.h"

namespace JSC { namespace FTL {

namespace {

// This code relies on us being 64-bit. FTL is currently always 64-bit.
static const size_t wordSize = 8;

// This is an RAII class that sets up the stack frame size and the offsets of
// the register save areas needed to make a slow path call.
class CallContext {
public:
    CallContext(
        State& state, const RegisterSet& usedRegisters, CCallHelpers& jit,
        unsigned numArgs, GPRReg returnRegister)
        : m_state(state)
        , m_usedRegisters(usedRegisters)
        , m_jit(jit)
        , m_numArgs(numArgs)
        , m_returnRegister(returnRegister)
    {
        // We don't care that you're using callee-save, stack, or hardware registers.
        m_usedRegisters.exclude(RegisterSet::stackRegisters());
        m_usedRegisters.exclude(RegisterSet::reservedHardwareRegisters());
        m_usedRegisters.exclude(RegisterSet::calleeSaveRegisters());

        // The return register doesn't need to be saved.
        if (m_returnRegister != InvalidGPRReg)
            m_usedRegisters.clear(m_returnRegister);

        size_t stackBytesNeededForReturnAddress = wordSize;

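        // Arguments beyond what fits in registers are passed on the stack, so
        // reserve one word for each argument past NUMBER_OF_ARGUMENT_REGISTERS.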
        m_offsetToSavingArea =
            (std::max(m_numArgs, NUMBER_OF_ARGUMENT_REGISTERS) - NUMBER_OF_ARGUMENT_REGISTERS) * wordSize;

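        // Figure out which calling convention registers (the argument registers
        // and possibly the return value register) are live here. These get saved
        // inline, above the outgoing argument area, since the call itself uses
        // them and the thunk cannot save them for us.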
        for (unsigned i = std::min(NUMBER_OF_ARGUMENT_REGISTERS, numArgs); i--;)
            m_argumentRegisters.set(GPRInfo::toArgumentRegister(i));
        m_callingConventionRegisters.merge(m_argumentRegisters);
        if (returnRegister != InvalidGPRReg)
            m_callingConventionRegisters.set(GPRInfo::returnValueGPR);
        m_callingConventionRegisters.filter(m_usedRegisters);

        unsigned numberOfCallingConventionRegisters =
            m_callingConventionRegisters.numberOfSetRegisters();

        size_t offsetToThunkSavingArea =
            m_offsetToSavingArea +
            numberOfCallingConventionRegisters * wordSize;

        m_stackBytesNeeded =
            offsetToThunkSavingArea +
            stackBytesNeededForReturnAddress +
            (m_usedRegisters.numberOfSetRegisters() - numberOfCallingConventionRegisters) * wordSize;

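        // Round the frame size up to the required stack alignment.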
        m_stackBytesNeeded = (m_stackBytesNeeded + stackAlignmentBytes() - 1) & ~(stackAlignmentBytes() - 1);

        m_jit.subPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded), CCallHelpers::stackPointerRegister);

        m_thunkSaveSet = m_usedRegisters;

        // This relies on all calling convention registers also being temp registers.
        unsigned stackIndex = 0;
        for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
            GPRReg reg = GPRInfo::toRegister(i);
            if (!m_callingConventionRegisters.get(reg))
                continue;
            m_jit.storePtr(reg, CCallHelpers::Address(CCallHelpers::stackPointerRegister, m_offsetToSavingArea + (stackIndex++) * wordSize));
            m_thunkSaveSet.clear(reg);
        }

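        // The registers that remain in m_thunkSaveSet are not saved here; the
        // slow path call thunk saves them, in the save area starting at this
        // offset within our frame.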
        m_offset = offsetToThunkSavingArea;
    }

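    // The destructor undoes the frame setup: it moves the return value into the
    // requested register, reloads the registers that were saved inline, and pops
    // the stack frame.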
    ~CallContext()
    {
        if (m_returnRegister != InvalidGPRReg)
            m_jit.move(GPRInfo::returnValueGPR, m_returnRegister);

        unsigned stackIndex = 0;
        for (unsigned i = GPRInfo::numberOfRegisters; i--;) {
            GPRReg reg = GPRInfo::toRegister(i);
            if (!m_callingConventionRegisters.get(reg))
                continue;
            m_jit.loadPtr(CCallHelpers::Address(CCallHelpers::stackPointerRegister, m_offsetToSavingArea + (stackIndex++) * wordSize), reg);
        }

        m_jit.addPtr(CCallHelpers::TrustedImm32(m_stackBytesNeeded), CCallHelpers::stackPointerRegister);
    }

    RegisterSet usedRegisters() const
    {
        return m_thunkSaveSet;
    }

    ptrdiff_t offset() const
    {
        return m_offset;
    }

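    // The key describes everything the shared slow path call thunk needs to
    // know: the registers it must save, the call target, the argument
    // registers, and the offset of its save area within our frame.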
    SlowPathCallKey keyWithTarget(void* callTarget) const
    {
        return SlowPathCallKey(usedRegisters(), callTarget, m_argumentRegisters, offset());
    }

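    // Emits the call and records it with the finalizer, which will later link
    // it to the matching thunk; optionally appends an exception check.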
    MacroAssembler::Call makeCall(void* callTarget, MacroAssembler::JumpList* exceptionTarget)
    {
        MacroAssembler::Call result = m_jit.call();
        m_state.finalizer->slowPathCalls.append(SlowPathCall(
            result, keyWithTarget(callTarget)));
        if (exceptionTarget)
            exceptionTarget->append(m_jit.emitExceptionCheck());
        return result;
    }

private:
    State& m_state;
    RegisterSet m_usedRegisters;
    RegisterSet m_argumentRegisters;
    RegisterSet m_callingConventionRegisters;
    CCallHelpers& m_jit;
    unsigned m_numArgs;
    GPRReg m_returnRegister;
    size_t m_offsetToSavingArea;
    size_t m_stackBytesNeeded;
    RegisterSet m_thunkSaveSet;
    ptrdiff_t m_offset;
};

} // anonymous namespace

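// Records which code origin this call site belongs to, by storing an encoded
// code origin index into the tag of the frame's ArgumentCount slot, where the
// runtime expects to find the call frame's location.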
void storeCodeOrigin(State& state, CCallHelpers& jit, CodeOrigin codeOrigin)
{
    if (!codeOrigin.isSet())
        return;

    unsigned index = state.jitCode->common.addCodeOrigin(codeOrigin);
    unsigned locationBits = CallFrame::Location::encodeAsCodeOriginIndex(index);
    jit.store32(
        CCallHelpers::TrustedImm32(locationBits),
        CCallHelpers::tagFor(static_cast<VirtualRegister>(JSStack::ArgumentCount)));
}

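// Calls a get-by-id-style operation: four arguments (ExecState, stubInfo,
// object, uid), with the result landing in the given result register.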
MacroAssembler::Call callOperation(
    State& state, const RegisterSet& usedRegisters, CCallHelpers& jit,
    CodeOrigin codeOrigin, MacroAssembler::JumpList* exceptionTarget,
    J_JITOperation_ESsiJI operation, GPRReg result, StructureStubInfo* stubInfo,
    GPRReg object, StringImpl* uid)
{
    storeCodeOrigin(state, jit, codeOrigin);
    CallContext context(state, usedRegisters, jit, 4, result);
    jit.setupArgumentsWithExecState(
        CCallHelpers::TrustedImmPtr(stubInfo), object,
        CCallHelpers::TrustedImmPtr(uid));
    return context.makeCall(bitwise_cast<void*>(operation), exceptionTarget);
}

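// Calls a put-by-id-style operation: five arguments (ExecState, stubInfo,
// value, object, uid), with no result register.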
MacroAssembler::Call callOperation(
    State& state, const RegisterSet& usedRegisters, CCallHelpers& jit,
    CodeOrigin codeOrigin, MacroAssembler::JumpList* exceptionTarget,
    V_JITOperation_ESsiJJI operation, StructureStubInfo* stubInfo, GPRReg value,
    GPRReg object, StringImpl* uid)
{
    storeCodeOrigin(state, jit, codeOrigin);
    CallContext context(state, usedRegisters, jit, 5, InvalidGPRReg);
    jit.setupArgumentsWithExecState(
        CCallHelpers::TrustedImmPtr(stubInfo), value, object,
        CCallHelpers::TrustedImmPtr(uid));
    return context.makeCall(bitwise_cast<void*>(operation), exceptionTarget);
}

} } // namespace JSC::FTL

#endif // ENABLE(FTL_JIT)