blob: 0804e4dae1e9b88271b5f5849c9b9945d77e941e [file] [log] [blame]
barraclough@apple.come367b002008-12-04 05:43:14 +00001/*
mark.lam@apple.com97d3f452019-01-16 18:44:25 +00002 * Copyright (C) 2008-2019 Apple Inc. All rights reserved.
barraclough@apple.come367b002008-12-04 05:43:14 +00003 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#include "config.h"
barraclough@apple.come367b002008-12-04 05:43:14 +000027
28#if ENABLE(JIT)
oliver@apple.com0cc25c32010-10-19 23:55:08 +000029#if USE(JSVALUE64)
ggaren@apple.com7214b5a2010-05-21 20:52:30 +000030#include "JIT.h"
barraclough@apple.come367b002008-12-04 05:43:14 +000031
keith_miller@apple.comd06760d2020-04-18 21:45:34 +000032#include "BytecodeOperandsForCheckpoint.h"
33#include "CacheableIdentifierInlines.h"
msaboff@apple.com4cd77e62015-09-18 23:51:41 +000034#include "CallFrameShuffler.h"
barraclough@apple.come367b002008-12-04 05:43:14 +000035#include "CodeBlock.h"
mark.lam@apple.coma4fe7ab2012-11-09 03:03:44 +000036#include "JITInlines.h"
barraclough@apple.come367b002008-12-04 05:43:14 +000037#include "JSArray.h"
38#include "JSFunction.h"
39#include "Interpreter.h"
fpizlo@apple.comfb7eff22014-02-11 01:45:50 +000040#include "JSCInlines.h"
benjamin@webkit.orgf766fd92014-07-08 04:23:30 +000041#include "LinkBuffer.h"
mark.lam@apple.com8a737122018-09-08 06:29:41 +000042#include "OpcodeInlines.h"
barraclough@apple.come367b002008-12-04 05:43:14 +000043#include "ResultType.h"
fpizlo@apple.com74485fb2015-02-10 03:27:43 +000044#include "SetupVarargsFrame.h"
aperez@igalia.comb1016a12020-04-21 07:48:52 +000045#include "SlowPathCall.h"
msaboff@apple.com95894332014-01-29 19:18:54 +000046#include "StackAlignment.h"
fpizlo@apple.com947ec622013-01-02 08:48:18 +000047#include "ThunkGenerators.h"
barraclough@apple.come367b002008-12-04 05:43:14 +000048
keith_miller@apple.comd06760d2020-04-18 21:45:34 +000049#include <wtf/StringPrintStream.h>
barraclough@apple.come367b002008-12-04 05:43:14 +000050
51namespace JSC {
52
// Stores the result of a call into its destination virtual register and
// feeds it to the value profile for this bytecode. Call results arrive in
// regT0 (the return-value GPR on JSVALUE64); the destination register is
// looked up per-checkpoint so multi-checkpoint ops (e.g. iterator_open)
// write the result of the current checkpoint, not always the same operand.
template<typename Op>
void JIT::emitPutCallResult(const Op& bytecode)
{
    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    emitPutVirtualRegister(destinationFor(bytecode, m_bytecodeIndex.checkpoint()).virtualRegister(), regT0);
}
59
// Sets up the callee frame for the fixed-argument call opcodes (everything
// except the *_varargs / forward-arguments forms, which are handled by the
// overload below). The argument values have already been stored at their
// frame offsets by bytecode; this only (a) optionally profiles the structure
// of argument 0, and (b) points the stack pointer at the new frame and fills
// in its argument-count slot.
template<typename Op>
std::enable_if_t<
    Op::opcodeID != op_call_varargs && Op::opcodeID != op_construct_varargs
    && Op::opcodeID != op_tail_call_varargs && Op::opcodeID != op_tail_call_forward_arguments
, void>
JIT::compileSetupFrame(const Op& bytecode, CallLinkInfo*)
{
    unsigned checkpoint = m_bytecodeIndex.checkpoint();
    auto& metadata = bytecode.metadata(m_codeBlock);
    int argCountIncludingThis = argumentCountIncludingThisFor(bytecode, checkpoint);
    // registerOffset is negative: the callee frame lives below the caller's
    // locals on the downward-growing stack.
    int registerOffset = -static_cast<int>(stackOffsetInRegistersForCall(bytecode, checkpoint));

    if (Op::opcodeID == op_call && shouldEmitProfiling()) {
        // Record the StructureID of argument 0 ('this') in the array profile
        // so later tiers can specialize. Skipped when the value is not a cell.
        emitGetVirtualRegister(VirtualRegister(registerOffset + CallFrame::argumentOffsetIncludingThis(0)), regT0);
        Jump done = branchIfNotCell(regT0);
        load32(Address(regT0, JSCell::structureIDOffset()), regT0);
        store32(regT0, arrayProfileFor(metadata, checkpoint).addressOfLastSeenStructureID());
        done.link(this);
    }

    // sp = newCallFrame + sizeof(CallerFrameAndPC); then store the argument
    // count (payload half of the ArgumentCount slot) into the new frame.
    addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);
    store32(TrustedImm32(argCountIncludingThis), Address(stackPointerRegister, CallFrameSlot::argumentCountIncludingThis * static_cast<int>(sizeof(Register)) + PayloadOffset - sizeof(CallerFrameAndPC)));
}
83
84
85template<typename Op>
86std::enable_if_t<
87 Op::opcodeID == op_call_varargs || Op::opcodeID == op_construct_varargs
88 || Op::opcodeID == op_tail_call_varargs || Op::opcodeID == op_tail_call_forward_arguments
89, void>
90JIT::compileSetupFrame(const Op& bytecode, CallLinkInfo* info)
91{
keith_miller@apple.coma1c17ed2020-01-17 04:09:32 +000092 VirtualRegister thisValue = bytecode.m_thisValue;
93 VirtualRegister arguments = bytecode.m_arguments;
94 int firstFreeRegister = bytecode.m_firstFree.offset(); // FIXME: Why is this a virtual register if we never use it as one...
mark.lam@apple.com97d3f452019-01-16 18:44:25 +000095 int firstVarArgOffset = bytecode.m_firstVarArg;
ggaren@apple.com29496602009-05-12 04:20:29 +000096
mark.lam@apple.com03944a02013-10-14 16:42:22 +000097 emitGetVirtualRegister(arguments, regT1);
ysuzuki@apple.com52e98bb2019-10-22 09:24:48 +000098 Z_JITOperation_GJZZ sizeOperation;
tzagallo@apple.com3474dd02018-10-29 13:16:03 +000099 if (Op::opcodeID == op_tail_call_forward_arguments)
keith_miller@apple.come497e202016-06-13 21:05:36 +0000100 sizeOperation = operationSizeFrameForForwardArguments;
101 else
102 sizeOperation = operationSizeFrameForVarargs;
ysuzuki@apple.com52e98bb2019-10-22 09:24:48 +0000103 callOperation(sizeOperation, TrustedImmPtr(m_codeBlock->globalObject()), regT1, -firstFreeRegister, firstVarArgOffset);
fpizlo@apple.combcfd39e2015-02-10 23:16:36 +0000104 move(TrustedImm32(-firstFreeRegister), regT1);
105 emitSetVarargsFrame(*this, returnValueGPR, false, regT1, regT1);
106 addPtr(TrustedImm32(-(sizeof(CallerFrameAndPC) + WTF::roundUpToMultipleOf(stackAlignmentBytes(), 5 * sizeof(void*)))), regT1, stackPointerRegister);
107 emitGetVirtualRegister(arguments, regT2);
ysuzuki@apple.com52e98bb2019-10-22 09:24:48 +0000108 F_JITOperation_GFJZZ setupOperation;
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000109 if (Op::opcodeID == op_tail_call_forward_arguments)
keith_miller@apple.come497e202016-06-13 21:05:36 +0000110 setupOperation = operationSetupForwardArgumentsFrame;
111 else
112 setupOperation = operationSetupVarargsFrame;
ysuzuki@apple.com52e98bb2019-10-22 09:24:48 +0000113 callOperation(setupOperation, TrustedImmPtr(m_codeBlock->globalObject()), regT1, regT2, firstVarArgOffset, regT0);
mark.lam@apple.comab6b5eb2013-11-08 04:05:27 +0000114 move(returnValueGPR, regT1);
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000115
fpizlo@apple.com8fefdd32015-02-18 19:55:47 +0000116 // Profile the argument count.
ysuzuki@apple.com88da11f2019-12-22 03:12:00 +0000117 load32(Address(regT1, CallFrameSlot::argumentCountIncludingThis * static_cast<int>(sizeof(Register)) + PayloadOffset), regT2);
ysuzuki@apple.comd3c7bf22019-09-20 02:31:45 +0000118 load32(info->addressOfMaxArgumentCountIncludingThis(), regT0);
fpizlo@apple.com8fefdd32015-02-18 19:55:47 +0000119 Jump notBiggest = branch32(Above, regT0, regT2);
ysuzuki@apple.comd3c7bf22019-09-20 02:31:45 +0000120 store32(regT2, info->addressOfMaxArgumentCountIncludingThis());
fpizlo@apple.com8fefdd32015-02-18 19:55:47 +0000121 notBiggest.link(this);
122
fpizlo@apple.com74485fb2015-02-10 03:27:43 +0000123 // Initialize 'this'.
124 emitGetVirtualRegister(thisValue, regT0);
125 store64(regT0, Address(regT1, CallFrame::thisArgumentOffset() * static_cast<int>(sizeof(Register))));
126
msaboff@apple.com95894332014-01-29 19:18:54 +0000127 addPtr(TrustedImm32(sizeof(CallerFrameAndPC)), regT1, stackPointerRegister);
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000128}
129
// Generic case: only op_call_eval needs eval handling (see the
// specialization below); every other call opcode reports "not handled"
// so compileOpCall continues with the normal call path.
template<typename Op>
bool JIT::compileCallEval(const Op&)
{
    return false;
}
135
136template<>
137bool JIT::compileCallEval(const OpCallEval& bytecode)
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000138{
msaboff@apple.com95894332014-01-29 19:18:54 +0000139 addPtr(TrustedImm32(-static_cast<ptrdiff_t>(sizeof(CallerFrameAndPC))), stackPointerRegister, regT1);
msaboff@apple.com2906e8b2014-09-04 21:23:38 +0000140 storePtr(callFrameRegister, Address(regT1, CallFrame::callerFrameOffset()));
msaboff@apple.com95894332014-01-29 19:18:54 +0000141
142 addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
143 checkStackPointerAlignment();
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000144
tzagallo@apple.coma6b8c342020-04-07 22:32:21 +0000145 move(TrustedImm32(bytecode.m_ecmaMode.value()), regT2);
146 callOperation(operationCallEval, m_codeBlock->globalObject(), regT1, regT2);
msaboff@apple.com2906e8b2014-09-04 21:23:38 +0000147
utatane.tea@gmail.comc940ae52018-05-17 16:18:24 +0000148 addSlowCase(branchIfEmpty(regT0));
msaboff@apple.com2906e8b2014-09-04 21:23:38 +0000149
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000150 sampleCodeBlock(m_codeBlock);
oliver@apple.comcf0e6c42013-07-25 04:01:45 +0000151
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000152 emitPutCallResult(bytecode);
153
154 return true;
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000155}
156
// Slow path for op_call_eval: reached when operationCallEval returned empty
// (the callee turned out not to be the eval function). Re-does the call as
// an ordinary virtual call.
void JIT::compileCallEvalSlowCase(const Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
{
    linkAllSlowCases(iter);

    auto bytecode = instruction->as<OpCallEval>();
    CallLinkInfo* info = m_codeBlock->addCallLinkInfo();
    info->setUpCall(CallLinkInfo::Call, CodeOrigin(m_bytecodeIndex), regT0);

    int registerOffset = -bytecode.m_argv;

    // Point sp back at the already-populated callee frame.
    addPtr(TrustedImm32(registerOffset * sizeof(Register) + sizeof(CallerFrameAndPC)), callFrameRegister, stackPointerRegister);

    // Reload the callee from the frame and dispatch through the virtual-call
    // thunk (no inline cache for eval call sites).
    load64(Address(stackPointerRegister, sizeof(Register) * CallFrameSlot::callee - sizeof(CallerFrameAndPC)), regT0);
    emitDumbVirtualCall(vm(), m_codeBlock->globalObject(), info);
    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    emitPutCallResult(bytecode);
}
178
// Generic case: only op_tail_call performs the frame-shuffling tail call
// (see the specialization below); other opcodes report "not handled" so
// compileOpCall continues with the normal (or varargs tail) call path.
template<typename Op>
bool JIT::compileTailCall(const Op&, CallLinkInfo*, unsigned)
{
    return false;
}
184
185template<>
186bool JIT::compileTailCall(const OpTailCall& bytecode, CallLinkInfo* info, unsigned callLinkInfoIndex)
187{
188 CallFrameShuffleData shuffleData;
mark.lam@apple.com97d3f452019-01-16 18:44:25 +0000189 shuffleData.numPassedArgs = bytecode.m_argc;
mark.lam@apple.com9ed91502019-09-19 16:14:47 +0000190 shuffleData.numberTagRegister = GPRInfo::numberTagRegister;
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000191 shuffleData.numLocals =
mark.lam@apple.com97d3f452019-01-16 18:44:25 +0000192 bytecode.m_argv - sizeof(CallerFrameAndPC) / sizeof(Register);
193 shuffleData.args.resize(bytecode.m_argc);
194 for (unsigned i = 0; i < bytecode.m_argc; ++i) {
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000195 shuffleData.args[i] =
196 ValueRecovery::displacedInJSStack(
keith_miller@apple.coma1c17ed2020-01-17 04:09:32 +0000197 virtualRegisterForArgumentIncludingThis(i) - bytecode.m_argv,
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000198 DataFormatJS);
199 }
200 shuffleData.callee =
201 ValueRecovery::inGPR(regT0, DataFormatJS);
202 shuffleData.setupCalleeSaveRegisters(m_codeBlock);
203 info->setFrameShuffleData(shuffleData);
204 CallFrameShuffler(*this, shuffleData).prepareForTailCall();
205 m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedTailCall();
206 return true;
207}
208
209template<typename Op>
210void JIT::compileOpCall(const Instruction* instruction, unsigned callLinkInfoIndex)
211{
212 OpcodeID opcodeID = Op::opcodeID;
213 auto bytecode = instruction->as<Op>();
keith_miller@apple.comd06760d2020-04-18 21:45:34 +0000214 VirtualRegister callee = calleeFor(bytecode, m_bytecodeIndex.checkpoint());
barraclough@apple.come367b002008-12-04 05:43:14 +0000215
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000216 /* Caller always:
217 - Updates callFrameRegister to callee callFrame.
218 - Initializes ArgumentCount; CallerFrame; Callee.
219
220 For a JS call:
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000221 - Callee initializes ReturnPC; CodeBlock.
222 - Callee restores callFrameRegister before return.
223
224 For a non-JS call:
msaboff@apple.com217b2c72015-01-21 21:43:55 +0000225 - Caller initializes ReturnPC; CodeBlock.
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000226 - Caller restores callFrameRegister after return.
227 */
pvollan@apple.com415da952016-09-08 09:09:59 +0000228 CallLinkInfo* info = nullptr;
basile_clement@apple.com7d9a7092015-07-28 20:12:33 +0000229 if (opcodeID != op_call_eval)
230 info = m_codeBlock->addCallLinkInfo();
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000231 compileSetupFrame(bytecode, info);
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000232
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000233 // SP holds newCallFrame + sizeof(CallerFrameAndPC), with ArgumentCount initialized.
keith_miller@apple.com0f985ec2019-10-23 00:55:38 +0000234 auto bytecodeIndex = m_codeBlock->bytecodeIndex(instruction);
235 uint32_t locationBits = CallSiteIndex(bytecodeIndex).bits();
ysuzuki@apple.com88da11f2019-12-22 03:12:00 +0000236 store32(TrustedImm32(locationBits), Address(callFrameRegister, CallFrameSlot::argumentCountIncludingThis * static_cast<int>(sizeof(Register)) + TagOffset));
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000237
commit-queue@webkit.org2cb9c252016-12-13 19:38:13 +0000238 emitGetVirtualRegister(callee, regT0); // regT0 holds callee.
239 store64(regT0, Address(stackPointerRegister, CallFrameSlot::callee * static_cast<int>(sizeof(Register)) - sizeof(CallerFrameAndPC)));
msaboff@apple.comc15ae7e2015-09-16 23:40:35 +0000240
keith_miller@apple.comd06760d2020-04-18 21:45:34 +0000241 if (compileCallEval(bytecode))
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000242 return;
barraclough@apple.come367b002008-12-04 05:43:14 +0000243
barraclough@apple.com289318a2008-12-22 01:00:07 +0000244 DataLabelPtr addressOfLinkedFunctionCheck;
keith_miller@apple.com308f2ba2018-02-28 05:41:18 +0000245 Jump slowCase = branchPtrWithPatch(NotEqual, regT0, addressOfLinkedFunctionCheck, TrustedImmPtr(nullptr));
ggaren@apple.com539d1bb2011-11-14 19:21:40 +0000246 addSlowCase(slowCase);
barraclough@apple.com970af2c2009-08-13 05:58:36 +0000247
fpizlo@apple.comba262b22014-03-23 04:34:38 +0000248 ASSERT(m_callCompilationInfo.size() == callLinkInfoIndex);
keith_miller@apple.com0f985ec2019-10-23 00:55:38 +0000249 info->setUpCall(CallLinkInfo::callTypeFor(opcodeID), CodeOrigin(m_bytecodeIndex), regT0);
fpizlo@apple.comba262b22014-03-23 04:34:38 +0000250 m_callCompilationInfo.append(CallCompilationInfo());
251 m_callCompilationInfo[callLinkInfoIndex].hotPathBegin = addressOfLinkedFunctionCheck;
252 m_callCompilationInfo[callLinkInfoIndex].callLinkInfo = info;
barraclough@apple.come367b002008-12-04 05:43:14 +0000253
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000254 if (compileTailCall(bytecode, info, callLinkInfoIndex)) {
msaboff@apple.com4cd77e62015-09-18 23:51:41 +0000255 return;
256 }
257
keith_miller@apple.come497e202016-06-13 21:05:36 +0000258 if (opcodeID == op_tail_call_varargs || opcodeID == op_tail_call_forward_arguments) {
msaboff@apple.com4cd77e62015-09-18 23:51:41 +0000259 emitRestoreCalleeSaves();
msaboff@apple.comc15ae7e2015-09-16 23:40:35 +0000260 prepareForTailCallSlow();
261 m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedTailCall();
262 return;
263 }
264
fpizlo@apple.comba262b22014-03-23 04:34:38 +0000265 m_callCompilationInfo[callLinkInfoIndex].hotPathOther = emitNakedCall();
barraclough@apple.come367b002008-12-04 05:43:14 +0000266
msaboff@apple.com95894332014-01-29 19:18:54 +0000267 addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
268 checkStackPointerAlignment();
269
barraclough@apple.com24a7abb2009-01-16 23:34:46 +0000270 sampleCodeBlock(m_codeBlock);
oliver@apple.comcf0e6c42013-07-25 04:01:45 +0000271
tzagallo@apple.com3474dd02018-10-29 13:16:03 +0000272 emitPutCallResult(bytecode);
barraclough@apple.come367b002008-12-04 05:43:14 +0000273}
274
// Shared slow-path emitter for all call-family opcodes except op_call_eval
// (which uses compileCallEvalSlowCase). Reached when the patchable callee
// check in compileOpCall failed; dispatches through the link-call thunk.
template<typename Op>
void JIT::compileOpCallSlowCase(const Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter, unsigned callLinkInfoIndex)
{
    OpcodeID opcodeID = Op::opcodeID;
    ASSERT(opcodeID != op_call_eval);

    linkAllSlowCases(iter);

    // Tail calls abandon this frame, so callee saves must be restored first.
    if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs || opcodeID == op_tail_call_forward_arguments)
        emitRestoreCalleeSaves();

    // The link-call thunk expects the global object in regT3 and the
    // CallLinkInfo in regT2.
    move(TrustedImmPtr(m_codeBlock->globalObject()), regT3);
    move(TrustedImmPtr(m_callCompilationInfo[callLinkInfoIndex].callLinkInfo), regT2);

    m_callCompilationInfo[callLinkInfoIndex].callReturnLocation =
        emitNakedCall(m_vm->getCTIStub(linkCallThunkGenerator).retaggedCode<NoPtrTag>());

    // A tail call never returns here; trap if it somehow does.
    if (opcodeID == op_tail_call || opcodeID == op_tail_call_varargs) {
        abortWithReason(JITDidReturnFromTailCall);
        return;
    }

    addPtr(TrustedImm32(stackPointerOffsetFor(m_codeBlock) * sizeof(Register)), callFrameRegister, stackPointerRegister);
    checkStackPointerAlignment();

    sampleCodeBlock(m_codeBlock);

    auto bytecode = instruction->as<Op>();
    emitPutCallResult(bytecode);
}
305
// op_call fast path: delegates to the shared compileOpCall emitter.
void JIT::emit_op_call(const Instruction* currentInstruction)
{
    compileOpCall<OpCall>(currentInstruction, m_callLinkInfoIndex++);
}
310
// op_tail_call fast path: delegates to the shared compileOpCall emitter.
void JIT::emit_op_tail_call(const Instruction* currentInstruction)
{
    compileOpCall<OpTailCall>(currentInstruction, m_callLinkInfoIndex++);
}
315
// op_call_eval fast path. Note: no ++ on m_callLinkInfoIndex — eval call
// sites allocate their CallLinkInfo in the slow case (see
// compileCallEvalSlowCase), not via the indexed m_callCompilationInfo table.
void JIT::emit_op_call_eval(const Instruction* currentInstruction)
{
    compileOpCall<OpCallEval>(currentInstruction, m_callLinkInfoIndex);
}
320
// op_call_varargs fast path: delegates to the shared compileOpCall emitter.
void JIT::emit_op_call_varargs(const Instruction* currentInstruction)
{
    compileOpCall<OpCallVarargs>(currentInstruction, m_callLinkInfoIndex++);
}
msaboff@apple.comc15ae7e2015-09-16 23:40:35 +0000325
// op_tail_call_varargs fast path: delegates to the shared compileOpCall emitter.
void JIT::emit_op_tail_call_varargs(const Instruction* currentInstruction)
{
    compileOpCall<OpTailCallVarargs>(currentInstruction, m_callLinkInfoIndex++);
}
330
// op_tail_call_forward_arguments fast path: delegates to compileOpCall.
void JIT::emit_op_tail_call_forward_arguments(const Instruction* currentInstruction)
{
    compileOpCall<OpTailCallForwardArguments>(currentInstruction, m_callLinkInfoIndex++);
}
335
// op_construct_varargs fast path: delegates to the shared compileOpCall emitter.
void JIT::emit_op_construct_varargs(const Instruction* currentInstruction)
{
    compileOpCall<OpConstructVarargs>(currentInstruction, m_callLinkInfoIndex++);
}
oliver@apple.comcf0e6c42013-07-25 04:01:45 +0000340
// op_construct fast path: delegates to the shared compileOpCall emitter.
void JIT::emit_op_construct(const Instruction* currentInstruction)
{
    compileOpCall<OpConstruct>(currentInstruction, m_callLinkInfoIndex++);
}
345
// op_call slow path: delegates to the shared compileOpCallSlowCase emitter.
void JIT::emitSlow_op_call(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpCall>(currentInstruction, iter, m_callLinkInfoIndex++);
}
350
// op_tail_call slow path: delegates to compileOpCallSlowCase.
void JIT::emitSlow_op_tail_call(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpTailCall>(currentInstruction, iter, m_callLinkInfoIndex++);
}
355
// op_call_eval slow path: uses the dedicated eval slow case (no indexed
// CallLinkInfo, so m_callLinkInfoIndex is not advanced here either).
void JIT::emitSlow_op_call_eval(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileCallEvalSlowCase(currentInstruction, iter);
}
360
// op_call_varargs slow path: delegates to compileOpCallSlowCase.
void JIT::emitSlow_op_call_varargs(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpCallVarargs>(currentInstruction, iter, m_callLinkInfoIndex++);
}
msaboff@apple.comc15ae7e2015-09-16 23:40:35 +0000365
// op_tail_call_varargs slow path: delegates to compileOpCallSlowCase.
void JIT::emitSlow_op_tail_call_varargs(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpTailCallVarargs>(currentInstruction, iter, m_callLinkInfoIndex++);
}
keith_miller@apple.come497e202016-06-13 21:05:36 +0000370
// op_tail_call_forward_arguments slow path: delegates to compileOpCallSlowCase.
void JIT::emitSlow_op_tail_call_forward_arguments(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpTailCallForwardArguments>(currentInstruction, iter, m_callLinkInfoIndex++);
}
oliver@apple.com177c2b92014-03-28 01:10:25 +0000375
// op_construct_varargs slow path: delegates to compileOpCallSlowCase.
void JIT::emitSlow_op_construct_varargs(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpConstructVarargs>(currentInstruction, iter, m_callLinkInfoIndex++);
}
380
// op_construct slow path: delegates to compileOpCallSlowCase.
void JIT::emitSlow_op_construct(const Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
    compileOpCallSlowCase<OpConstruct>(currentInstruction, iter, m_callLinkInfoIndex++);
}
385
// op_iterator_open: first tries the C++ fast path (iterator_open_try_fast_*,
// selected by instruction width); if that reports IterationMode::Generic,
// calls the Symbol.iterator method (checkpoint 1) and then fetches the
// resulting iterator's 'next' property with an inline cache (checkpoint 2).
void JIT::emit_op_iterator_open(const Instruction* instruction)
{
    auto bytecode = instruction->as<OpIteratorOpen>();
    // Pick the width-matched fast-path slow-path-call target.
    auto* tryFastFunction = ([&] () {
        switch (instruction->width()) {
        case Narrow: return iterator_open_try_fast_narrow;
        case Wide16: return iterator_open_try_fast_wide16;
        case Wide32: return iterator_open_try_fast_wide32;
        default: RELEASE_ASSERT_NOT_REACHED();
        }
    })();

    JITSlowPathCall slowPathCall(this, instruction, tryFastFunction);
    slowPathCall.call();
    // returnValueGPR2 carries the IterationMode; non-Generic means the fast
    // path did all the work and we can skip the generic protocol entirely.
    Jump fastCase = branch32(NotEqual, GPRInfo::returnValueGPR2, TrustedImm32(static_cast<uint32_t>(IterationMode::Generic)));

    compileOpCall<OpIteratorOpen>(instruction, m_callLinkInfoIndex++);
    advanceToNextCheckpoint();
    // call result (iterator) is in regT0

    const Identifier* ident = &vm().propertyNames->next;

    emitJumpSlowCaseIfNotJSCell(regT0);

    // Inline-cached load of iterator.next (slow path handled in
    // emitSlow_op_iterator_open).
    JITGetByIdGenerator gen(
        m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(m_bytecodeIndex.offset())), RegisterSet::stubUnavailableRegisters(),
        CacheableIdentifier::createFromImmortalIdentifier(ident->impl()), JSValueRegs(regT0), JSValueRegs(regT0), AccessType::GetById);
    gen.generateFastPath(*this);
    addSlowCase(gen.slowPathJump());
    m_getByIds.append(gen);

    emitValueProfilingSite(bytecode.metadata(m_codeBlock));
    emitPutVirtualRegister(bytecode.m_next);

    fastCase.link(this);
}
422
423void JIT::emitSlow_op_iterator_open(const Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
424{
425 linkAllSlowCases(iter);
426 compileOpCallSlowCase<OpIteratorOpen>(instruction, iter, m_callLinkInfoIndex++);
427 emitJumpSlowToHotForCheckpoint(jump());
428
429
430 linkAllSlowCases(iter);
keith_miller@apple.coma9043a62020-04-23 18:56:51 +0000431
432 GPRReg iteratorGPR = regT0;
433 JumpList notObject;
434 notObject.append(branchIfNotCell(iteratorGPR));
435 notObject.append(branchIfNotObject(iteratorGPR));
436
keith_miller@apple.comd06760d2020-04-18 21:45:34 +0000437 auto bytecode = instruction->as<OpIteratorOpen>();
438 VirtualRegister nextVReg = bytecode.m_next;
439 UniquedStringImpl* ident = vm().propertyNames->next.impl();
440
441 JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
442
443 Label coldPathBegin = label();
444
keith_miller@apple.coma9043a62020-04-23 18:56:51 +0000445 Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByIdOptimize, nextVReg, TrustedImmPtr(m_codeBlock->globalObject()), gen.stubInfo(), iteratorGPR, CacheableIdentifier::createFromImmortalIdentifier(ident).rawBits());
keith_miller@apple.comd06760d2020-04-18 21:45:34 +0000446 gen.reportSlowPathCall(coldPathBegin, call);
keith_miller@apple.coma9043a62020-04-23 18:56:51 +0000447 auto done = jump();
448
449 notObject.link(this);
450 callOperation(operationThrowIteratorResultIsNotObject, TrustedImmPtr(m_codeBlock->globalObject()));
451
452 done.link(this);
keith_miller@apple.comd06760d2020-04-18 21:45:34 +0000453}
454
455void JIT::emit_op_iterator_next(const Instruction* instruction)
456{
457 auto bytecode = instruction->as<OpIteratorNext>();
458 auto& metadata = bytecode.metadata(m_codeBlock);
459 auto* tryFastFunction = ([&] () {
460 switch (instruction->width()) {
461 case Narrow: return iterator_next_try_fast_narrow;
462 case Wide16: return iterator_next_try_fast_wide16;
463 case Wide32: return iterator_next_try_fast_wide32;
464 default: RELEASE_ASSERT_NOT_REACHED();
465 }
466 })();
467
468 emitGetVirtualRegister(bytecode.m_next, regT0);
469 Jump genericCase = branchIfNotEmpty(regT0);
ysuzuki@apple.comdb921642020-04-20 01:56:24 +0000470
471 JITSlowPathCall slowPathCall(this, instruction, tryFastFunction);
472 slowPathCall.call();
keith_miller@apple.comd06760d2020-04-18 21:45:34 +0000473 Jump fastCase = branch32(NotEqual, GPRInfo::returnValueGPR2, TrustedImm32(static_cast<uint32_t>(IterationMode::Generic)));
474
475 genericCase.link(this);
476 or8(TrustedImm32(static_cast<uint8_t>(IterationMode::Generic)), AbsoluteAddress(&metadata.m_iterationMetadata.seenModes));
477 compileOpCall<OpIteratorNext>(instruction, m_callLinkInfoIndex++);
478 advanceToNextCheckpoint();
479 // call result ({ done, value } JSObject) in regT0
480
481 GPRReg valueGPR = regT0;
482 GPRReg iterResultGPR = regT2;
483 GPRReg doneGPR = regT1;
484 // iterResultGPR will get trashed by the first get by id below.
485 move(valueGPR, iterResultGPR);
486
487 {
488 emitJumpSlowCaseIfNotJSCell(iterResultGPR);
489
490 RegisterSet preservedRegs = RegisterSet::stubUnavailableRegisters();
491 preservedRegs.add(valueGPR);
492 JITGetByIdGenerator gen(
493 m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(m_bytecodeIndex.offset())), preservedRegs,
494 CacheableIdentifier::createFromImmortalIdentifier(vm().propertyNames->next.impl()), JSValueRegs(iterResultGPR), JSValueRegs(doneGPR), AccessType::GetById);
495 gen.generateFastPath(*this);
496 addSlowCase(gen.slowPathJump());
497 m_getByIds.append(gen);
498
499 emitValueProfilingSite(metadata);
500 emitPutVirtualRegister(bytecode.m_done, doneGPR);
501 advanceToNextCheckpoint();
502 }
503
504
505 {
506 GPRReg scratch1 = regT2;
507 GPRReg scratch2 = regT3;
508 const bool shouldCheckMasqueradesAsUndefined = false;
509 JumpList iterationDone = branchIfTruthy(vm(), JSValueRegs(doneGPR), scratch1, scratch2, fpRegT0, fpRegT1, shouldCheckMasqueradesAsUndefined, m_codeBlock->globalObject());
510
511 JITGetByIdGenerator gen(
512 m_codeBlock, CodeOrigin(m_bytecodeIndex), CallSiteIndex(BytecodeIndex(m_bytecodeIndex.offset())), RegisterSet::stubUnavailableRegisters(),
513 CacheableIdentifier::createFromImmortalIdentifier(vm().propertyNames->value.impl()), JSValueRegs(valueGPR), JSValueRegs(valueGPR), AccessType::GetById);
514 gen.generateFastPath(*this);
515 addSlowCase(gen.slowPathJump());
516 m_getByIds.append(gen);
517
518 emitValueProfilingSite(metadata);
519 emitPutVirtualRegister(bytecode.m_value, valueGPR);
520
521 iterationDone.link(this);
522 }
523
524 fastCase.link(this);
525}
526
527void JIT::emitSlow_op_iterator_next(const Instruction* instruction, Vector<SlowCaseEntry>::iterator& iter)
528{
529 linkAllSlowCases(iter);
530 compileOpCallSlowCase<OpIteratorNext>(instruction, iter, m_callLinkInfoIndex++);
531 emitJumpSlowToHotForCheckpoint(jump());
532
533 auto bytecode = instruction->as<OpIteratorNext>();
534 {
535 VirtualRegister doneVReg = bytecode.m_done;
536 GPRReg iterResultGPR = regT2;
537
538 linkAllSlowCases(iter);
539 JumpList notObject;
540 notObject.append(branchIfNotCell(iterResultGPR));
541 notObject.append(branchIfNotObject(iterResultGPR));
542
543 UniquedStringImpl* ident = vm().propertyNames->done.impl();
544 JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
545
546 Label coldPathBegin = label();
547
548 Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByIdOptimize, doneVReg, TrustedImmPtr(m_codeBlock->globalObject()), gen.stubInfo(), iterResultGPR, CacheableIdentifier::createFromImmortalIdentifier(ident).rawBits());
549
550 gen.reportSlowPathCall(coldPathBegin, call);
551 emitGetVirtualRegister(doneVReg, regT1);
552 emitGetVirtualRegister(bytecode.m_value, regT0);
553 emitJumpSlowToHotForCheckpoint(jump());
554
555 notObject.link(this);
556 callOperation(operationThrowIteratorResultIsNotObject, TrustedImmPtr(m_codeBlock->globalObject()));
557 }
558
559 {
560 linkAllSlowCases(iter);
561 VirtualRegister valueVReg = bytecode.m_value;
562 GPRReg iterResultGPR = regT0;
563
564 UniquedStringImpl* ident = vm().propertyNames->value.impl();
565 JITGetByIdGenerator& gen = m_getByIds[m_getByIdIndex++];
566
567 Label coldPathBegin = label();
568
569 Call call = callOperationWithProfile(bytecode.metadata(m_codeBlock), operationGetByIdOptimize, valueVReg, TrustedImmPtr(m_codeBlock->globalObject()), gen.stubInfo(), iterResultGPR, CacheableIdentifier::createFromImmortalIdentifier(ident).rawBits());
570
571 gen.reportSlowPathCall(coldPathBegin, call);
572 }
573
574}
575
barraclough@apple.come367b002008-12-04 05:43:14 +0000576} // namespace JSC
577
oliver@apple.com0cc25c32010-10-19 23:55:08 +0000578#endif // USE(JSVALUE64)
barraclough@apple.come367b002008-12-04 05:43:14 +0000579#endif // ENABLE(JIT)