blob: 375eab1421e22ecda033329ce64626b677c8f589 [file] [log] [blame]
barraclough@apple.come367b002008-12-04 05:43:14 +00001/*
2 * Copyright (C) 2008 Apple Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
26#ifndef JITInlineMethods_h
27#define JITInlineMethods_h
28
barraclough@apple.come367b002008-12-04 05:43:14 +000029
30#if ENABLE(JIT)
31
barraclough@apple.come367b002008-12-04 05:43:14 +000032namespace JSC {
33
ggaren@apple.com540d71a62009-07-30 20:57:44 +000034/* Deprecated: Please use JITStubCall instead. */
barraclough@apple.come367b002008-12-04 05:43:14 +000035
/* Deprecated: Please use JITStubCall instead. */
// Loads JIT stub argument `argumentNumber` from the stack into `dst`.
// Arguments are laid out on the stack one JSValue-sized slot apiece,
// starting at JITSTACKFRAME_ARGS_INDEX machine words from the stack pointer.
ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    // Each argument occupies sizeof(JSValue)/sizeof(void*) machine-word slots
    // (2 on 32-bit JSVALUE32_64 builds, 1 on 64-bit builds).
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek(dst, argumentStackOffset);
}
41
oliver@apple.com8d181632009-09-25 02:40:59 +000042ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
43{
44 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
45}
46
// Returns the compile-time-known JSValue for constant-register operand `src`.
// Precondition: `src` must actually be a constant register index.
ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}
52
// Emits a pointer-sized store of `from` into the payload slot of the given
// call-frame header entry. Note: does not write a tag (see
// emitPutCellToCallFrameHeader for the tagged variant).
ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, payloadFor(entry, callFrameRegister));
}
57
// Stores a cell pointer into a call-frame header entry. On 32-bit value
// representations the tag word must be written explicitly; on JSVALUE64 the
// pointer alone is a fully-formed cell JSValue.
ALWAYS_INLINE void JIT::emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(JSValue::CellTag), tagFor(entry, callFrameRegister));
#endif
    storePtr(from, payloadFor(entry, callFrameRegister));
}
65
// Stores a raw int32 into a call-frame header entry, writing the Int32 tag
// and the 32-bit payload separately (intTagFor/intPayloadFor address the two
// halves of the Register slot).
ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
}
71
// Stores a compile-time-constant pointer `value` into the given call-frame
// header entry (entry is an index in Register-sized units).
ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(TrustedImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}
76
// Loads a pointer-sized value from call-frame header entry `entry` of the
// frame pointed to by `from` into `to`.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    // The 64-bit JIT caches the last result in a register; this load may
    // clobber it, so invalidate the cache.
    killLastResultRegister();
#endif
}
84
// Fast path: extracts the single character of a length-1 JSString in `src`
// into `dst` as an integer. Appends to `failures` for every case the fast
// path cannot handle (not a JSString, length != 1, unresolved/ropey value).
// Clobbers regT1 (used as a scratch for the StringImpl flags).
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    // Bail unless the cell's vtable identifies it as a JSString.
    failures.append(branchPtr(NotEqual, Address(src), TrustedImmPtr(m_globalData->jsStringVPtr)));
    // Bail unless the string has exactly one character.
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    // A null value pointer means the string's impl isn't available here — bail.
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplFlagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);

    // Branch on the 8-bit flag: load one byte for 8-bit backing stores,
    // one 16-bit unit otherwise.
    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}
103
// 32-bit variant of emitGetFromCallFrameHeaderPtr: loads only the low 32 bits
// of call-frame header entry `entry` (of the frame in `from`) into `to`.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    // Loading into a register may clobber the cached last result.
    killLastResultRegister();
#endif
}
111
// Emits a near call whose target will be linked to `function` at finalization
// time; records the call site (with its bytecode offset) in m_calls so the
// linker can patch it. Returns the Call handle for the emitted site.
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}
120
barraclough@apple.com4836c7a2011-05-01 22:20:59 +0000121ALWAYS_INLINE bool JIT::atJumpTarget()
122{
123 while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
124 if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
125 return true;
126 ++m_jumpTargetsPosition;
127 }
128 return false;
129}
130
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

// Opens an instruction sequence that must not be split by a constant-pool
// flush. insnSpace/constSpace are upper bounds on the instruction bytes and
// constant-pool bytes the sequence will emit; space is reserved up front so
// the assembler need not flush mid-sequence.
ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
    JSInterfaceJIT::beginUninterruptedSequence();
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#elif CPU(SH4)
#ifndef NDEBUG
    // As on ARM: reserve room for the trailing label in debug builds.
    insnSpace += sizeof(SH4Word);
    constSpace += sizeof(uint64_t);
#endif

    m_assembler.ensureSpace(insnSpace + m_assembler.maxInstructionSize + 2, constSpace + 8);
#endif

    // NOTE(review): this inner guard is redundant — the enclosing block is
    // already conditional on ASSEMBLER_HAS_CONSTANT_POOL. Harmless, kept as-is.
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#ifndef NDEBUG
    // Record where the sequence starts so endUninterruptedSequence can assert
    // the emitted code stayed within the declared bounds.
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
#endif
}

// Closes a sequence opened by beginUninterruptedSequence and (in debug
// builds) asserts the emitted instructions/constants fit within the bounds
// declared at the open. `dst` is used only by the SH4 bound adjustment.
ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace, int dst)
{
    UNUSED_PARAM(dst);
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
    /* There are several cases when the uninterrupted sequence is larger than
     * maximum required offset for pathing the same sequence. Eg.: if in a
     * uninterrupted sequence the last macroassembler's instruction is a stub
     * call, it emits store instruction(s) which should not be included in the
     * calculation of length of uninterrupted sequence. So, the insnSpace and
     * constSpace should be upper limit instead of hard limit.
     */
#if CPU(SH4)
    // SH4 immediates have a limited range; destinations outside it require
    // extra instructions (and constants), so widen the asserted bounds.
    if ((dst > 15) || (dst < -16)) {
        insnSpace += 8;
        constSpace += 2;
    }

    if (((dst >= -16) && (dst < 0)) || ((dst > 7) && (dst <= 15)))
        insnSpace += 8;
#endif
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) <= insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin <= constSpace);
#endif
    JSInterfaceJIT::endUninterruptedSequence();
}

#endif
189
// Per-architecture helpers for saving/restoring the return address around
// calls. On link-register architectures (ARM, SH4, MIPS) the return address
// lives in a register; on x86/x86-64 it lives on the stack, so the helpers
// use push/pop.
#if CPU(ARM)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}
#elif CPU(SH4)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    // SH4 keeps the return address in the PR register; stspr copies it out.
    m_assembler.stspr(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    m_assembler.ldspr(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtrLinkReg(address);
}

#elif CPU(MIPS)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(returnAddressRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, returnAddressRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, returnAddressRegister);
}

#else // CPU(X86) || CPU(X86_64)

// On x86 the return address is on top of the stack after a call.
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif
258
// Prepares for a JIT stub call: points the first C argument register at the
// JITStackFrame (the current stack pointer) and stores the callFrame pointer
// into that frame so the stub can find it.
ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}
barraclough@apple.com0368c332010-07-14 20:12:51 +0000264
// Publishes the current call frame to JSGlobalData::topCallFrame so runtime
// code (e.g. stubs, GC) can see the active frame.
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
}
269
// Variant of restoreArgumentReference used from within trampolines, where the
// stack layout differs per architecture (on x86 the return address is still
// on the stack and must be skipped over).
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(TrustedImm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#elif CPU(SH4)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
barraclough@apple.com23f0c052008-12-16 04:48:16 +0000282
// Emits a structure check on the cell in `reg`: returns a Jump taken when the
// cell's Structure pointer differs from the expected `structure`.
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, JSCell::structureOffset()), TrustedImmPtr(structure));
}
287
barraclough@apple.com4f46a502008-12-13 01:39:38 +0000288ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
barraclough@apple.come367b002008-12-04 05:43:14 +0000289{
barraclough@apple.com4f46a502008-12-13 01:39:38 +0000290 if (!m_codeBlock->isKnownNotImmediate(vReg))
291 linkSlowCase(iter);
barraclough@apple.come367b002008-12-04 05:43:14 +0000292}
293
// Records `jump` as a slow-path branch for the current bytecode offset; it
// will be linked when the cold path for this opcode is generated.
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}
300
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000301ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
302{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000303 ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000304
305 const JumpList::JumpVector& jumpVector = jumpList.jumps();
306 size_t size = jumpVector.size();
307 for (size_t i = 0; i < size; ++i)
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000308 m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000309}
310
// Records a slow-case entry with no associated jump for the current bytecode
// offset (used when the slow path is always reachable via fall-through).
ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}
318
// Queues `jump` to be linked to the label of the bytecode instruction at
// (current offset + relativeOffset) once all labels are known.
ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}
325
// Links `jump` (emitted on a slow path) directly back into the already-known
// hot-path label at (current offset + relativeOffset).
ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}
332
// Given a register holding a Structure pointer, returns a Jump taken when the
// structure's type is below ObjectType (i.e. the cell is not an object).
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotObject(RegisterID structureReg)
{
    return branch8(Below, Address(structureReg, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}
337
// Loads the Structure of the cell in `baseReg` into `scratchReg` and returns
// a Jump taken when the cell's JSType differs from `type`.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotType(RegisterID baseReg, RegisterID scratchReg, JSType type)
{
    loadPtr(Address(baseReg, JSCell::structureOffset()), scratchReg);
    return branch8(NotEqual, Address(scratchReg, Structure::typeInfoTypeOffset()), TrustedImm32(type));
}
343
#if ENABLE(SAMPLING_FLAGS)
// Sets bit `flag` (1-based, 1..32) in the global sampling-flags word.
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

// Clears bit `flag` (1-based, 1..32) in the global sampling-flags word.
ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif
359
360#if ENABLE(SAMPLING_COUNTERS)
361ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
362{
mjs@apple.comcc668212010-01-04 11:38:56 +0000363#if CPU(X86_64) // Or any other 64-bit plattform.
oliver@apple.combe4e0672011-03-28 17:14:57 +0000364 addPtr(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
mjs@apple.comcc668212010-01-04 11:38:56 +0000365#elif CPU(X86) // Or any other little-endian 32-bit plattform.
barraclough@apple.com66184e22011-03-13 21:16:29 +0000366 intptr_t hiWord = reinterpret_cast<intptr_t>(counter.addressOfCounter()) + sizeof(int32_t);
oliver@apple.combe4e0672011-03-28 17:14:57 +0000367 add32(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
368 addWithCarry32(TrustedImm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
barraclough@apple.comc32f32e2009-05-13 09:10:02 +0000369#else
370#error "SAMPLING_FLAGS not implemented on this platform."
371#endif
barraclough@apple.com536c0db2009-05-12 06:21:56 +0000372}
373#endif
374
#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
// Records the currently-executing instruction in the sampler's slot. On
// x86-64 a 64-bit immediate cannot be stored directly to an absolute address,
// so the slot address is first materialized in ecx.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
// 32-bit targets can store the encoded sample straight to the slot address.
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif
389
#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
// Records the currently-executing CodeBlock in the sampler's slot. As with
// sampleInstruction, x86-64 routes the store through ecx because a 64-bit
// immediate cannot be stored to an absolute address directly.
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
// 32-bit targets store the CodeBlock pointer straight to the slot address.
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000404
// Returns true if bytecode operand `src` is a constant register holding a
// single-character string (the case the character fast paths can exploit).
ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}
409
// Emits an inline allocation of a ClassType-sized cell from the heap's
// free list, leaving the new object in `result` and using `storagePtr` as a
// scratch. Falls to the slow case when the size class's free list is empty.
// The object's vtable, structure, inheritor ID, and property-storage pointer
// are all initialized inline.
template <typename ClassType, typename StructureType> inline void JIT::emitAllocateBasicJSObject(StructureType structure, void* vtable, RegisterID result, RegisterID storagePtr)
{
    MarkedSpace::SizeClass* sizeClass = &m_globalData->heap.sizeClassForObject(sizeof(ClassType));
    loadPtr(&sizeClass->firstFreeCell, result);
    // Empty free list -> take the slow path (runtime allocation / GC).
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), storagePtr);
    storePtr(storagePtr, &sizeClass->firstFreeCell);

    // initialize the object's vtable
    storePtr(TrustedImmPtr(vtable), Address(result));

    // initialize the object's structure
    storePtr(structure, Address(result, JSCell::structureOffset()));

    // initialize the inheritor ID
    storePtr(TrustedImmPtr(0), Address(result, JSObject::offsetOfInheritorID()));

    // initialize the object's property storage pointer to the inline storage
    // that immediately follows the JSObject header
    addPtr(TrustedImm32(sizeof(JSObject)), result, storagePtr);
    storePtr(storagePtr, Address(result, ClassType::offsetOfPropertyStorage()));
}
433
// Convenience wrapper: inline-allocates a JSFinalObject with the given
// structure into `result`, clobbering `scratch`.
template <typename T> inline void JIT::emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID scratch)
{
    emitAllocateBasicJSObject<JSFinalObject>(structure, m_globalData->jsFinalObjectVPtr, result, scratch);
}
438
// Inline-allocates a JSFunction for `executable` into `result`, initializing
// its scope chain, executable, and name fields. `storagePtr` is a scratch.
inline void JIT::emitAllocateJSFunction(FunctionExecutable* executable, RegisterID scopeChain, RegisterID result, RegisterID storagePtr)
{
    emitAllocateBasicJSObject<JSFunction>(TrustedImmPtr(m_codeBlock->globalObject()->namedFunctionStructure()), m_globalData->jsFunctionVPtr, result, storagePtr);

    // store the function's scope chain
    storePtr(scopeChain, Address(result, JSFunction::offsetOfScopeChain()));

    // store the function's executable member
    storePtr(TrustedImmPtr(executable), Address(result, JSFunction::offsetOfExecutable()));

    // store the function's name
    // NOTE(review): the name is written through regT1, not `result` —
    // presumably regT1 is expected to hold the activation/registers the
    // functionNameOffset indexes into at every call site; confirm against
    // the op_new_func_exp caller before relying on this.
    ASSERT(executable->nameValue());
    int functionNameOffset = sizeof(JSValue) * m_codeBlock->globalObject()->functionNameOffset();
    storePtr(TrustedImmPtr(executable->nameValue()), Address(regT1, functionNameOffset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
#if USE(JSVALUE32_64)
    store32(TrustedImm32(JSValue::CellTag), Address(regT1, functionNameOffset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
#endif
}
457
#if ENABLE(VALUE_PROFILER)
// Emits a value-profiling site for the current bytecode offset: stores the
// value currently in regT0 (and its tag in regT1 on 32-bit builds) into one
// of the profile's buckets. `siteKind` selects whether a new ValueProfile is
// created (FirstProfilingSite) or an existing one reused. Clobbers regT3 and
// bucketCounterRegister in the multi-bucket configuration.
inline void JIT::emitValueProfilingSite(ValueProfilingSiteKind siteKind)
{
    if (!shouldEmitProfiling())
        return;

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scratch = regT3;

    ValueProfile* valueProfile;
    if (siteKind == FirstProfilingSite)
        valueProfile = m_codeBlock->addValueProfile(m_bytecodeOffset);
    else {
        ASSERT(siteKind == SubsequentProfilingSite);
        valueProfile = m_codeBlock->valueProfileForBytecodeOffset(m_bytecodeOffset);
    }

    ASSERT(valueProfile);

    if (ValueProfile::numberOfBuckets == 1) {
        // We're in a simple configuration: only one bucket, so we can just do a direct
        // store.
#if USE(JSVALUE64)
        storePtr(value, valueProfile->m_buckets);
#else
        EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
        store32(value, &descriptor->asBits.payload);
        store32(valueTag, &descriptor->asBits.tag);
#endif
        return;
    }

    // Multi-bucket configuration: advance the bucket counter by a pseudo-
    // random odd step (1 or 3) and mask it into range, so samples spread
    // across buckets over time.
    if (m_randomGenerator.getUint32() & 1)
        add32(Imm32(1), bucketCounterRegister);
    else
        add32(Imm32(3), bucketCounterRegister);
    and32(Imm32(ValueProfile::bucketIndexMask), bucketCounterRegister);
    move(ImmPtr(valueProfile->m_buckets), scratch);
#if USE(JSVALUE64)
    storePtr(value, BaseIndex(scratch, bucketCounterRegister, TimesEight));
#elif USE(JSVALUE32_64)
    store32(value, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(valueTag, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
#endif
}
#endif
507
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000508#if USE(JSVALUE32_64)
509
// Loads the tag word of virtual register `index` into `tag`, preferring a
// register-to-register move if the tag is currently mapped to a register, or
// an immediate if the operand is a constant. Always unmaps `tag` afterwards
// since its previous mapping is clobbered.
inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}
528
// Loads the payload word of virtual register `index` into `payload`,
// mirroring emitLoadTag: mapped register first, then constant immediate,
// then a memory load. Unmaps `payload` in all cases.
inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}
547
// Materializes the constant JSValue `v` into a tag/payload register pair.
inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}
553
// Loads virtual register `index` (relative to `base`) into a tag/payload
// register pair. Load order is chosen so that when `payload` aliases `base`
// the base pointer is consumed (by the tag load) before being overwritten.
inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        // Use the mapping-aware helpers for frame-relative loads.
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}
574
575inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
576{
577 if (isMapped(index1)) {
578 emitLoad(index1, tag1, payload1);
579 emitLoad(index2, tag2, payload2);
580 return;
581 }
582 emitLoad(index2, tag2, payload2);
583 emitLoad(index1, tag1, payload1);
584}
585
// Loads virtual register `index` as a double into FP register `value`.
// Constants are read straight from the constant pool's register slot.
inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}
594
// Loads the int32 payload of virtual register `index` and converts it to a
// double in FP register `value`. For constants the payload is addressed
// directly inside the constant pool slot.
inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        // Address the payload half of the JSValue stored in the constant slot.
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}
604
// Stores a tag/payload register pair into virtual register `index` relative
// to `base`.
inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}
610
// Stores an int32 payload into virtual register `index`. The tag store is
// skipped when the caller knows the slot already holds an Int32 tag
// (indexIsInt32).
inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
617
// Stores an int32 into virtual register |index| and then records a register
// mapping for it at the bytecode offset following this opcode
// (m_bytecodeOffset + opcodeLength), so subsequent emission can reuse
// |tag|/|payload| without reloading from the call frame.
inline void JIT::emitStoreAndMapInt32(unsigned index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength)
{
    emitStoreInt32(index, payload, indexIsInt32);
    map(m_bytecodeOffset + opcodeLength, index, tag, payload);
}
623
// Stores an immediate int32 payload into virtual register |index|. The Int32
// tag store is omitted when the caller knows the slot already holds an int32
// (indexIsInt32).
inline void JIT::emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
630
// Stores a cell pointer payload into virtual register |index|. The Cell tag
// store is omitted when the caller knows the slot already holds a cell
// (indexIsCell).
inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}
637
// Stores a boolean payload into virtual register |index|. The Boolean tag
// store is omitted when the caller knows the slot already holds a boolean
// (indexIsBool).
inline void JIT::emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}
644
// Stores FP register |value| over the full slot of virtual register |index|
// (addressFor covers both tag and payload halves).
inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}
649
// Stores a compile-time-known JSValue into virtual register |index| as two
// immediate 32-bit stores: payload first, then tag.
inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}
655
// Initializes virtual register |dst| to the undefined value.
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}
660
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000661inline bool JIT::isLabeled(unsigned bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000662{
663 for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
664 unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000665 if (jumpTarget == bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000666 return true;
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000667 if (jumpTarget > bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000668 return false;
669 }
670 return false;
671}
672
// Records that virtual register |virtualRegisterIndex| is currently held in
// |tag|/|payload| as of |bytecodeOffset|. Skipped when the offset is a jump
// target — presumably because control can reach a labeled offset along a
// path where these registers do not hold the value.
inline void JIT::map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeOffset))
        return;

    m_mappedBytecodeOffset = bytecodeOffset;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}
683
684inline void JIT::unmap(RegisterID registerID)
685{
686 if (m_mappedTag == registerID)
687 m_mappedTag = (RegisterID)-1;
688 else if (m_mappedPayload == registerID)
689 m_mappedPayload = (RegisterID)-1;
690}
691
// Invalidates the entire register mapping: offset, virtual register, and
// both mapped machine registers are reset to their sentinel values.
inline void JIT::unmap()
{
    m_mappedBytecodeOffset = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}
699
700inline bool JIT::isMapped(unsigned virtualRegisterIndex)
701{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000702 if (m_mappedBytecodeOffset != m_bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000703 return false;
704 if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
705 return false;
706 return true;
707}
708
709inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
710{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000711 if (m_mappedBytecodeOffset != m_bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000712 return false;
713 if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
714 return false;
715 if (m_mappedPayload == (RegisterID)-1)
716 return false;
717 payload = m_mappedPayload;
718 return true;
719}
720
721inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
722{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000723 if (m_mappedBytecodeOffset != m_bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000724 return false;
725 if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
726 return false;
727 if (m_mappedTag == (RegisterID)-1)
728 return false;
729 tag = m_mappedTag;
730 return true;
731}
732
// Registers a slow-case check that virtual register |virtualRegisterIndex|
// holds a cell. No check is emitted when the register is statically known
// not to be an immediate. A constant register reaching here takes the slow
// path unconditionally — NOTE(review): presumably such a constant cannot be
// a cell; confirm against isKnownNotImmediate's definition.
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}
742
// Same slow-case cell check, but with the value's tag already loaded into
// |tag|: the non-constant case compares the tag register against CellTag
// instead of re-reading memory. Constants take the slow path unconditionally.
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}
752
// Links the next slow case only when a cell check was actually emitted for
// this register; the condition mirrors the guard in
// emitJumpSlowCaseIfNotJSCell, keeping the slow-case iterator in sync.
inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}
758
759ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
760{
761 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
762}
763
764ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
765{
766 if (isOperandConstantImmediateInt(op1)) {
767 constant = getConstantOperand(op1).asInt32();
768 op = op2;
769 return true;
770 }
771
772 if (isOperandConstantImmediateInt(op2)) {
773 constant = getConstantOperand(op2).asInt32();
774 op = op1;
775 return true;
776 }
777
778 return false;
779}
780
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000781#else // USE(JSVALUE32_64)
782
// Invalidates the cached "last result" register by setting the remembered
// bytecode register to a sentinel no real register index can match.
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}
787
// Loads virtual register |src| into machine register |dst|.
// Three paths: (1) a constant is materialized as an encoded immediate;
// (2) if |src| is the cached last result (temporaries only, and not at a
// jump target, where the cache cannot be trusted), the cached register is
// reused without touching memory; (3) otherwise the value is loaded from the
// call frame. Every path kills the last-result cache before returning.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src) && !atJumpTarget()) {
        // The argument we want is already stored in eax
        if (dst != cachedResultRegister)
            move(cachedResultRegister, dst);
        killLastResultRegister();
        return;
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}
812
// Loads two virtual registers. If src2 is the cached last result it is
// fetched first, because emitGetVirtualRegister kills the cache — loading
// src1 first would lose src2's chance to reuse the cached register.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}
823
// Returns the int32 value of constant operand |src|; callers are expected to
// have checked isOperandConstantImmediateInt(src) first.
ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}
828
829ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
830{
831 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
832}
833
// Stores |from| into virtual register |dst|, then updates the last-result
// cache: remembered as |dst| only when stored from the canonical cached
// register, invalidated otherwise.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}
839
// Initializes virtual register |dst| to the encoded undefined value with a
// single pointer-width immediate store.
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(TrustedImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
844
// Returns a jump taken when |reg| holds a cell, i.e. when none of the
// immediate tag bits are set: tested against tagMaskRegister on JSVALUE64,
// against the TagMask constant otherwise.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, TrustedImm32(TagMask));
#endif
}
853
// Returns a jump taken when both registers hold cells: ORing the two values
// into |scratch| leaves the tag bits clear only if neither had any set.
// Clobbers |scratch|.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}
860
// Registers a slow case entered when |reg| holds a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}
865
// Returns a jump taken when |reg| does NOT hold a cell, i.e. when any of the
// immediate tag bits are set.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, TrustedImm32(TagMask));
#endif
}
874
// Registers a slow case entered when |reg| does not hold a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}
879
// Registers the not-a-cell slow case only when static analysis cannot prove
// virtual register |vReg| is never an immediate; otherwise no check is
// emitted at all.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}
885
886#if USE(JSVALUE64)
oliver@apple.com8d181632009-09-25 02:40:59 +0000887
888inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
889{
890 if (m_codeBlock->isConstantRegisterIndex(index)) {
oliver@apple.comba10bec2011-03-08 23:17:32 +0000891 WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
oliver@apple.com8d181632009-09-25 02:40:59 +0000892 loadDouble(&inConstantPool, value);
893 } else
894 loadDouble(addressFor(index), value);
895}
896
// Emits code converting the int32 in virtual register |index| to a double in
// FP register |value|. A constant operand (asserted to be an immediate int)
// is converted from a compile-time immediate; otherwise the conversion reads
// the call-frame slot.
inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000905#endif
906
// Returns a jump taken when |reg| holds an immediate integer: on JSVALUE64
// an unsigned comparison against tagTypeNumberRegister, otherwise a test of
// the TagTypeNumber bit.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, TrustedImm32(TagTypeNumber));
#endif
}
915
// Returns a jump taken when |reg| does NOT hold an immediate integer; the
// inverse conditions of emitJumpIfImmediateInteger for each value encoding.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, TrustedImm32(TagTypeNumber));
#endif
}
924
// Returns a jump taken unless BOTH registers hold immediate integers: ANDing
// them into |scratch| preserves the integer tag bits only if both had them
// set. Clobbers |scratch|.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}
931
// Registers a slow case entered when |reg| is not an immediate integer.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}
936
// Registers a slow case entered unless both registers are immediate
// integers. Clobbers |scratch|.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}
941
// Registers a slow case entered when the emitJumpIfNotImmediateNumber check
// (defined elsewhere in this file) fires for |reg|.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}
946
oliver@apple.com0cc25c32010-10-19 23:55:08 +0000947#if USE(JSVALUE32_64)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000948ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
949{
barraclough@apple.comc6441e682011-04-11 18:29:14 +0000950 subPtr(TrustedImm32(TagTypeNumber), reg);
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000951}
952
// Strips the integer tag from |reg| (subtracting TagTypeNumber) and returns
// a jump taken when the de-tagged result is zero.
ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, TrustedImm32(TagTypeNumber), reg);
}
957#endif
958
// Re-applies the integer tag to a de-tagged value, writing the result to
// |dest|. On JSVALUE64 this delegates to the full int-to-immediate encoding;
// otherwise the tag is restored by adding TagTypeNumber back.
ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(TrustedImm32(TagTypeNumber), dest);
#endif
}
969
// operand is int32_t, must have been zero-extended if register is 64-bit.
// Encodes a raw int32 in |src| as an immediate JSValue in |dest| with no
// overflow check. JSVALUE64: OR in the type tag held in
// tagTypeNumberRegister. Otherwise: sign-extend, double the value
// (addPtr(dest, dest) is a left shift by one, making room for the tag bit),
// then re-tag.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}
983
// ORs the boolean encoding bits (ValueFalse) into |reg| — assumes reg holds
// 0 or 1 so the result is the encoded false/true value; confirm at call
// sites.
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}
988
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000989#endif // USE(JSVALUE32_64)
990
991} // namespace JSC
992
barraclough@apple.come367b002008-12-04 05:43:14 +0000993#endif // ENABLE(JIT)
994
995#endif