/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JITInlineMethods_h
#define JITInlineMethods_h


#if ENABLE(JIT)

namespace JSC {

/* Deprecated: Please use JITStubCall instead. */

ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
{
    unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
    peek(dst, argumentStackOffset);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
}

ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}

ALWAYS_INLINE void JIT::emitPutToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(from, payloadFor(entry, callFrameRegister));
}

ALWAYS_INLINE void JIT::emitPutCellToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    store32(TrustedImm32(JSValue::CellTag), tagFor(entry, callFrameRegister));
#endif
    storePtr(from, payloadFor(entry, callFrameRegister));
}

ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, RegisterFile::CallFrameHeaderEntry entry)
{
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
}

ALWAYS_INLINE void JIT::emitPutImmediateToCallFrameHeader(void* value, RegisterFile::CallFrameHeaderEntry entry)
{
    storePtr(TrustedImmPtr(value), Address(callFrameRegister, entry * sizeof(Register)));
}

ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchPtr(NotEqual, Address(src), TrustedImmPtr(m_globalData->jsStringVPtr)));
    failures.append(branchTest32(NonZero, Address(src, OBJECT_OFFSETOF(JSString, m_fiberCount))));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);
    load16(MacroAssembler::Address(dst, 0), dst);
}

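// Note on emitLoadCharacterString above: this is the single-character string
// fast path. It bails out (via `failures`) unless `src` is a JSString with no
// rope fibers and length exactly 1, then chases JSString -> StringImpl ->
// character data and loads the lone 16-bit code unit into `dst`. A rough,
// illustrative C equivalent of the fast path (accessor names hypothetical):
//
//     JSString* s = ...;                   // already vptr-checked as a string
//     if (s->m_fiberCount || s->length() != 1)
//         goto slowPath;
//     uint16_t c = stringImplData(s)[0];   // first UTF-16 code unit
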
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    killLastResultRegister();
#endif
}

ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}

ALWAYS_INLINE bool JIT::atJumpTarget()
{
    while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
        if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
            return true;
        ++m_jumpTargetsPosition;
    }
    return false;
}

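// Note on atJumpTarget above: emitGetVirtualRegister (JSVALUE64) caches the
// last bytecode result in a hardware register. That cache is only valid while
// control flow is straight-line; at a bytecode jump target the value may have
// arrived from another path, so the cache must not be used there. atJumpTarget
// walks the sorted jump-target list incrementally (m_jumpTargetsPosition)
// rather than re-scanning the whole list at every bytecode offset.
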
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL

ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
    JSInterfaceJIT::beginUninterruptedSequence();
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#elif CPU(SH4)
#ifndef NDEBUG
    insnSpace += sizeof(SH4Word);
    constSpace += sizeof(uint64_t);
#endif

    m_assembler.ensureSpace(insnSpace + m_assembler.maxInstructionSize + 2, constSpace + 8);
#endif

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#ifndef NDEBUG
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
#endif
}

ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace, int dst)
{
    UNUSED_PARAM(dst);
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
    /* In several cases the uninterrupted sequence is larger than the maximum
     * offset required to patch that same sequence. E.g.: if the last
     * macroassembler instruction in an uninterrupted sequence is a stub call,
     * it emits store instruction(s) which should not be included in the
     * calculated length of the uninterrupted sequence. So insnSpace and
     * constSpace should be treated as an upper limit rather than a hard limit.
     */
#if CPU(SH4)
    if ((dst > 15) || (dst < -16)) {
        insnSpace += 8;
        constSpace += 2;
    }

    if (((dst >= -16) && (dst < 0)) || ((dst > 7) && (dst <= 15)))
        insnSpace += 8;
#endif
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) <= insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin <= constSpace);
#endif
    JSInterfaceJIT::endUninterruptedSequence();
}

#endif

#if CPU(ARM)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(linkRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, linkRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, linkRegister);
}
#elif CPU(SH4)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    m_assembler.stspr(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    m_assembler.ldspr(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtrLinkReg(address);
}

#elif CPU(MIPS)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    move(returnAddressRegister, reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    move(reg, returnAddressRegister);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    loadPtr(address, returnAddressRegister);
}

#else // CPU(X86) || CPU(X86_64)

ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
{
    pop(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
{
    push(reg);
}

ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
{
    push(address);
}

#endif

ALWAYS_INLINE void JIT::restoreArgumentReference()
{
    move(stackPointerRegister, firstArgumentRegister);
    poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
}

ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
}

ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(TrustedImm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#elif CPU(SH4)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}

ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, JSCell::structureOffset()), TrustedImmPtr(structure));
}

ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}

ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    const JumpList::JumpVector& jumpVector = jumpList.jumps();
    size_t size = jumpVector.size();
    for (size_t i = 0; i < size; ++i)
        m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
}

ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}

ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotObject(RegisterID structureReg)
{
    return branch8(NotEqual, Address(structureReg, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}

#if ENABLE(SAMPLING_FLAGS)
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}

ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
#endif

#if ENABLE(SAMPLING_COUNTERS)
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, uint32_t count)
{
#if CPU(X86_64) // Or any other 64-bit platform.
    addPtr(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
#elif CPU(X86) // Or any other little-endian 32-bit platform.
    intptr_t hiWord = reinterpret_cast<intptr_t>(counter.addressOfCounter()) + sizeof(int32_t);
    add32(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
    addWithCarry32(TrustedImm32(0), AbsoluteAddress(reinterpret_cast<void*>(hiWord)));
#else
#error "SAMPLING_COUNTERS not implemented on this platform."
#endif
}
#endif

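// Note on emitCount above: sampling counters are 64-bit. On a 32-bit,
// little-endian target the increment is emitted as two 32-bit operations:
// add `count` to the low word, then propagate the carry into the high word,
// which sits sizeof(int32_t) bytes above it. Roughly (illustrative C only;
// the real code uses the CPU carry flag via addWithCarry32):
//
//     uint32_t* lo = (uint32_t*)counterAddress;
//     uint32_t* hi = lo + 1;              // little-endian layout
//     uint32_t old = *lo;
//     *lo += count;
//     *hi += (*lo < old) ? 1 : 0;         // carry into the high word
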
#if ENABLE(OPCODE_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
#endif
#endif

#if ENABLE(CODEBLOCK_SAMPLING)
#if CPU(X86_64)
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
#else
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
#endif
#endif

ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}

template <typename ClassType, typename StructureType> inline void JIT::emitAllocateBasicJSObject(StructureType structure, void* vtable, RegisterID result, RegisterID storagePtr)
{
    NewSpace::SizeClass* sizeClass = &m_globalData->heap.sizeClassFor(sizeof(ClassType));
    loadPtr(&sizeClass->firstFreeCell, result);
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), storagePtr);
    storePtr(storagePtr, &sizeClass->firstFreeCell);

    // initialize the object's vtable
    storePtr(TrustedImmPtr(vtable), Address(result));

    // initialize the object's structure
    storePtr(structure, Address(result, JSCell::structureOffset()));

    // initialize the inheritor ID
    storePtr(TrustedImmPtr(0), Address(result, JSObject::offsetOfInheritorID()));

    // initialize the object's property storage pointer
    addPtr(TrustedImm32(sizeof(JSObject)), result, storagePtr);
    storePtr(storagePtr, Address(result, ClassType::offsetOfPropertyStorage()));
}

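// Note on emitAllocateBasicJSObject above: the emitted code is the inline fast
// path of a free-list allocator. A rough C sketch of what the generated
// machine code does (illustrative only; the slow case jumps out to a helper):
//
//     FreeCell* cell = sizeClass->firstFreeCell;
//     if (!cell)
//         goto slowCase;                      // addSlowCase(branchTestPtr(Zero, ...))
//     sizeClass->firstFreeCell = cell->next;  // pop the free list
//     // ...then fill in vtable, structure, inheritor ID, property storage.
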
template <typename T> inline void JIT::emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID scratch)
{
    emitAllocateBasicJSObject<JSFinalObject>(structure, m_globalData->jsFinalObjectVPtr, result, scratch);
}

inline void JIT::emitAllocateJSFunction(FunctionExecutable* executable, RegisterID scopeChain, RegisterID result, RegisterID storagePtr)
{
    emitAllocateBasicJSObject<JSFunction>(TrustedImmPtr(m_codeBlock->globalObject()->namedFunctionStructure()), m_globalData->jsFunctionVPtr, result, storagePtr);

    // store the function's scope chain
    storePtr(scopeChain, Address(result, JSFunction::offsetOfScopeChain()));

    // store the function's executable member
    storePtr(TrustedImmPtr(executable), Address(result, JSFunction::offsetOfExecutable()));

    // store the function's name
    ASSERT(executable->nameValue());
    int functionNameOffset = sizeof(JSValue) * m_codeBlock->globalObject()->functionNameOffset();
    storePtr(TrustedImmPtr(executable->nameValue()), Address(regT1, functionNameOffset + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
#if USE(JSVALUE32_64)
    store32(TrustedImm32(JSValue::CellTag), Address(regT1, functionNameOffset + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
#endif
}

#if ENABLE(VALUE_PROFILER)
inline void JIT::emitValueProfilingSite(ValueProfilingSiteKind siteKind)
{
    const RegisterID value = regT0;
    const RegisterID scratch = regT3;

    ValueProfile* valueProfile;
    if (siteKind == FirstProfilingSite)
        valueProfile = m_codeBlock->addValueProfile(m_bytecodeOffset);
    else {
        ASSERT(siteKind == SubsequentProfilingSite);
        valueProfile = m_codeBlock->valueProfileForBytecodeOffset(m_bytecodeOffset);
    }

    ASSERT(valueProfile);

    if (m_randomGenerator.getUint32() & 1)
        add32(Imm32(1), bucketCounterRegister);
    else
        add32(Imm32(3), bucketCounterRegister);
    and32(Imm32(ValueProfile::bucketIndexMask), bucketCounterRegister);
    move(ImmPtr(valueProfile->buckets), scratch);
    storePtr(value, BaseIndex(scratch, bucketCounterRegister, TimesEight));
}
#endif

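// Note on emitValueProfilingSite above: each site advances the shared bucket
// counter by 1 or 3 (chosen once, at code-generation time, via
// m_randomGenerator) and masks it with bucketIndexMask, so successive profiled
// values land in different buckets in a cheap pseudo-random rotation instead
// of always overwriting bucket 0. The sampled value in regT0 is then stored,
// untyped, into valueProfile->buckets[counter] for later inspection.
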
#if USE(JSVALUE32_64)

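// In the JSVALUE32_64 representation a JSValue occupies two 32-bit words: a
// tag word (CellTag, Int32Tag, BooleanTag, double variants, ...) and a payload
// word, so the helpers below move values as (tag, payload) register pairs.
// Illustrative layout only (the authoritative definition lives in JSValue.h):
//
//     union EncodedValueDescriptor {
//         double asDouble;
//         struct { int32_t payload; int32_t tag; } asBits; // little-endian
//     };
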
inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
    RegisterID mappedTag;
    if (getMappedTag(index, mappedTag)) {
        move(mappedTag, tag);
        unmap(tag);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).tag()), tag);
        unmap(tag);
        return;
    }

    load32(tagFor(index), tag);
    unmap(tag);
}

inline void JIT::emitLoadPayload(unsigned index, RegisterID payload)
{
    RegisterID mappedPayload;
    if (getMappedPayload(index, mappedPayload)) {
        move(mappedPayload, payload);
        unmap(payload);
        return;
    }

    if (m_codeBlock->isConstantRegisterIndex(index)) {
        move(Imm32(getConstantOperand(index).payload()), payload);
        unmap(payload);
        return;
    }

    load32(payloadFor(index), payload);
    unmap(payload);
}

inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}

inline void JIT::emitLoad(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    ASSERT(tag != payload);

    if (base == callFrameRegister) {
        ASSERT(payload != base);
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}

inline void JIT::emitLoad2(unsigned index1, RegisterID tag1, RegisterID payload1, unsigned index2, RegisterID tag2, RegisterID payload2)
{
    if (isMapped(index1)) {
        emitLoad(index1, tag1, payload1);
        emitLoad(index2, tag2, payload2);
        return;
    }
    emitLoad(index2, tag2, payload2);
    emitLoad(index1, tag1, payload1);
}

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}

inline void JIT::emitStore(unsigned index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}

inline void JIT::emitStoreInt32(unsigned index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreInt32(unsigned index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreCell(unsigned index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreBool(unsigned index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}

inline void JIT::emitStoreDouble(unsigned index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}

inline void JIT::emitStore(unsigned index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}

inline bool JIT::isLabeled(unsigned bytecodeOffset)
{
    for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
        unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
        if (jumpTarget == bytecodeOffset)
            return true;
        if (jumpTarget > bytecodeOffset)
            return false;
    }
    return false;
}

inline void JIT::map(unsigned bytecodeOffset, unsigned virtualRegisterIndex, RegisterID tag, RegisterID payload)
{
    if (isLabeled(bytecodeOffset))
        return;

    m_mappedBytecodeOffset = bytecodeOffset;
    m_mappedVirtualRegisterIndex = virtualRegisterIndex;
    m_mappedTag = tag;
    m_mappedPayload = payload;
}

inline void JIT::unmap(RegisterID registerID)
{
    if (m_mappedTag == registerID)
        m_mappedTag = (RegisterID)-1;
    else if (m_mappedPayload == registerID)
        m_mappedPayload = (RegisterID)-1;
}

inline void JIT::unmap()
{
    m_mappedBytecodeOffset = (unsigned)-1;
    m_mappedVirtualRegisterIndex = (unsigned)-1;
    m_mappedTag = (RegisterID)-1;
    m_mappedPayload = (RegisterID)-1;
}

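// Note on the map/unmap machinery above: it is a one-entry cache recording
// that, at the current bytecode offset, the (tag, payload) of one virtual
// register is already live in a pair of hardware registers, so emitLoadTag and
// emitLoadPayload can use a register-to-register move instead of reloading
// from the call frame. The cache is conservatively dropped when a cached
// register is clobbered (unmap), when the bytecode offset changes, and at
// labeled bytecode offsets, where another control-flow path could have left
// different values in those registers.
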
inline bool JIT::isMapped(unsigned virtualRegisterIndex)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    return true;
}

inline bool JIT::getMappedPayload(unsigned virtualRegisterIndex, RegisterID& payload)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedPayload == (RegisterID)-1)
        return false;
    payload = m_mappedPayload;
    return true;
}

inline bool JIT::getMappedTag(unsigned virtualRegisterIndex, RegisterID& tag)
{
    if (m_mappedBytecodeOffset != m_bytecodeOffset)
        return false;
    if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
        return false;
    if (m_mappedTag == (RegisterID)-1)
        return false;
    tag = m_mappedTag;
    return true;
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}

inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}

inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
        linkSlowCase(iter);
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
{
    if (isOperandConstantImmediateInt(op1)) {
        constant = getConstantOperand(op1).asInt32();
        op = op2;
        return true;
    }

    if (isOperandConstantImmediateInt(op2)) {
        constant = getConstantOperand(op2).asInt32();
        op = op1;
        return true;
    }

    return false;
}

#else // USE(JSVALUE32_64)

ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}

// Loads an argument from the stack frame's register array into a hardware register.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        move(ImmPtr(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src) && !atJumpTarget()) {
        // The argument we want is already stored in eax
        if (dst != cachedResultRegister)
            move(cachedResultRegister, dst);
        killLastResultRegister();
        return;
    }

    loadPtr(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}

ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}

ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}

ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
{
    return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
}

ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    storePtr(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}

ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    storePtr(TrustedImmPtr(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(Zero, reg, tagMaskRegister);
#else
    return branchTest32(Zero, reg, TrustedImm32(TagMask));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    orPtr(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotJSCell(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchTestPtr(NonZero, reg, tagMaskRegister);
#else
    return branchTest32(NonZero, reg, TrustedImm32(TagMask));
#endif
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}

#if USE(JSVALUE64)

inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}

inline void JIT::emitLoadInt32ToDouble(unsigned index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
#endif

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(AboveOrEqual, reg, tagTypeNumberRegister);
#else
    return branchTest32(NonZero, reg, TrustedImm32(TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
#if USE(JSVALUE64)
    return branchPtr(Below, reg, tagTypeNumberRegister);
#else
    return branchTest32(Zero, reg, TrustedImm32(TagTypeNumber));
#endif
}

ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    andPtr(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}

ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}

#if USE(JSVALUE32_64)
ALWAYS_INLINE void JIT::emitFastArithDeTagImmediate(RegisterID reg)
{
    subPtr(TrustedImm32(TagTypeNumber), reg);
}

ALWAYS_INLINE JIT::Jump JIT::emitFastArithDeTagImmediateJumpIfZero(RegisterID reg)
{
    return branchSubPtr(Zero, TrustedImm32(TagTypeNumber), reg);
}
#endif

ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    emitFastArithIntToImmNoCheck(src, dest);
#else
    if (src != dest)
        move(src, dest);
    addPtr(TrustedImm32(TagTypeNumber), dest);
#endif
}

// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
#if USE(JSVALUE64)
    if (src != dest)
        move(src, dest);
    orPtr(tagTypeNumberRegister, dest);
#else
    signExtend32ToPtr(src, dest);
    addPtr(dest, dest);
    emitFastArithReTagImmediate(dest, dest);
#endif
}

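// Note on the JSVALUE64 paths above: an immediate integer is encoded by OR-ing
// the zero-extended int32 with the TagTypeNumber bit pattern held in
// tagTypeNumberRegister, so "is this an immediate integer?" reduces to one
// unsigned compare against that register (AboveOrEqual in
// emitJumpIfImmediateInteger). Illustrative encoding only; the authoritative
// constants live in the JSValue/JSImmediate definitions:
//
//     encoded = TagTypeNumber | (uint32_t)intValue;  // int32 payload in low bits
//     isInt   = encoded >= TagTypeNumber;            // unsigned comparison
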
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}

#endif // USE(JSVALUE32_64)

} // namespace JSC

#endif // ENABLE(JIT)

#endif