blob: 6ea0321a09e9049ce71185e5fd70147f866ab3b6 [file] [log] [blame]
barraclough@apple.come367b002008-12-04 05:43:14 +00001/*
fpizlo@apple.com604d38a2012-07-01 03:54:49 +00002 * Copyright (C) 2008, 2012 Apple Inc. All rights reserved.
barraclough@apple.come367b002008-12-04 05:43:14 +00003 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
12 *
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
15 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
16 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
17 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
18 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
19 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
20 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
21 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
22 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
23 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24 */
25
mark.lam@apple.coma4fe7ab2012-11-09 03:03:44 +000026#ifndef JITInlines_h
27#define JITInlines_h
barraclough@apple.come367b002008-12-04 05:43:14 +000028
barraclough@apple.come367b002008-12-04 05:43:14 +000029
30#if ENABLE(JIT)
31
barraclough@apple.come367b002008-12-04 05:43:14 +000032namespace JSC {
33
oliver@apple.com8d181632009-09-25 02:40:59 +000034ALWAYS_INLINE bool JIT::isOperandConstantImmediateDouble(unsigned src)
35{
36 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isDouble();
37}
38
// Fetches the JSValue for a constant-register operand. The caller must ensure
// 'src' actually indexes the code block's constant pool.
ALWAYS_INLINE JSValue JIT::getConstantOperand(unsigned src)
{
    ASSERT(m_codeBlock->isConstantRegisterIndex(src));
    return m_codeBlock->getConstant(src);
}
44
// Stores the 32-bit integer in 'from' into the given call frame header slot,
// tagging it as Int32 on the 32-bit value representation.
ALWAYS_INLINE void JIT::emitPutIntToCallFrameHeader(RegisterID from, JSStack::CallFrameHeaderEntry entry)
{
#if USE(JSVALUE32_64)
    // Tag and payload halves are written separately.
    store32(TrustedImm32(Int32Tag), intTagFor(entry, callFrameRegister));
    store32(from, intPayloadFor(entry, callFrameRegister));
#else
    // On 64-bit, one store writes the whole slot.
    store64(from, addressFor(entry, callFrameRegister));
#endif
}
54
// Loads a pointer-sized value from call frame header slot 'entry' of the
// frame in register 'from' into register 'to'.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeaderPtr(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    loadPtr(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    // The destination may alias the cached last-result register; drop the cache.
    killLastResultRegister();
#endif
}
62
// Loads a 32-bit value from call frame header slot 'entry' of the frame in
// register 'from' into register 'to'.
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load32(Address(from, entry * sizeof(Register)), to);
#if USE(JSVALUE64)
    // The destination may alias the cached last-result register; drop the cache.
    killLastResultRegister();
#endif
}
70
#if USE(JSVALUE64)
// Loads a full 64-bit value from call frame header slot 'entry' of the frame
// in register 'from' into register 'to' (64-bit value representation only).
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader64(JSStack::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
    load64(Address(from, entry * sizeof(Register)), to);
    killLastResultRegister();
}
#endif
78
// Loads the single character of a one-character JSString in 'src' into 'dst'.
// Appends to 'failures' if 'src' is not a string cell, its length is not 1,
// or its StringImpl pointer is null (value not yet available).
// Clobbers regT1 (used to hold the StringImpl flags).
ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
{
    failures.append(branchPtr(NotEqual, Address(src, JSCell::structureOffset()), TrustedImmPtr(m_globalData->stringStructure.get())));
    failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), TrustedImm32(1)));
    loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
    // A zero StringImpl pointer means there is no character data to read.
    failures.append(branchTest32(Zero, dst));
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplFlagsOffset()), regT1);
    loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);

    // Branch on the 8-bit flag: read one byte for 8-bit strings, otherwise
    // one 16-bit unit.
    JumpList is16Bit;
    JumpList cont8Bit;
    is16Bit.append(branchTest32(Zero, regT1, TrustedImm32(ThunkHelpers::stringImpl8BitFlag())));
    load8(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.append(jump());
    is16Bit.link(this);
    load16(MacroAssembler::Address(dst, 0), dst);
    cont8Bit.link(this);
}
97
// Emits a near call to 'function' and records it (with the current bytecode
// offset) so the linker can patch the target later. Returns the call handle.
ALWAYS_INLINE JIT::Call JIT::emitNakedCall(CodePtr function)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Call nakedCall = nearCall();
    m_calls.append(CallRecord(nakedCall, m_bytecodeOffset, function.executableAddress()));
    return nakedCall;
}
106
barraclough@apple.com4836c7a2011-05-01 22:20:59 +0000107ALWAYS_INLINE bool JIT::atJumpTarget()
108{
109 while (m_jumpTargetsPosition < m_codeBlock->numberOfJumpTargets() && m_codeBlock->jumpTarget(m_jumpTargetsPosition) <= m_bytecodeOffset) {
110 if (m_codeBlock->jumpTarget(m_jumpTargetsPosition) == m_bytecodeOffset)
111 return true;
112 ++m_jumpTargetsPosition;
113 }
114 return false;
115}
116
barraclough@apple.com970af2c2009-08-13 05:58:36 +0000117#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
118
// Marks the start of an instruction sequence that must not be interrupted by
// a constant-pool flush. Reserves instruction/constant space up front on the
// architectures that maintain a constant pool (ARM traditional, SH4).
ALWAYS_INLINE void JIT::beginUninterruptedSequence(int insnSpace, int constSpace)
{
#if CPU(ARM_TRADITIONAL)
#ifndef NDEBUG
    // Ensure the label after the sequence can also fit
    insnSpace += sizeof(ARMWord);
    constSpace += sizeof(uint64_t);
#endif

    ensureSpace(insnSpace, constSpace);

#elif CPU(SH4)
#ifndef NDEBUG
    insnSpace += sizeof(SH4Word);
    constSpace += sizeof(uint64_t);
#endif

    m_assembler.ensureSpace(insnSpace + m_assembler.maxInstructionSize + 2, constSpace + 8);
#endif

#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
#ifndef NDEBUG
    // Record where the sequence begins so endUninterruptedSequence() can
    // assert that the reserved space was not exceeded.
    m_uninterruptedInstructionSequenceBegin = label();
    m_uninterruptedConstantSequenceBegin = sizeOfConstantPool();
#endif
#endif
}
146
// Marks the end of an uninterrupted sequence and, in debug builds, asserts
// that the emitted instructions and constants fit the space reserved by
// beginUninterruptedSequence().
ALWAYS_INLINE void JIT::endUninterruptedSequence(int insnSpace, int constSpace, int dst)
{
    UNUSED_PARAM(dst);
#if defined(ASSEMBLER_HAS_CONSTANT_POOL) && ASSEMBLER_HAS_CONSTANT_POOL
    /* There are several cases when the uninterrupted sequence is larger than
     * maximum required offset for pathing the same sequence. Eg.: if in a
     * uninterrupted sequence the last macroassembler's instruction is a stub
     * call, it emits store instruction(s) which should not be included in the
     * calculation of length of uninterrupted sequence. So, the insnSpace and
     * constSpace should be upper limit instead of hard limit.
     */
#if CPU(SH4)
    // SH4: widen the allowance for destination offsets whose encodings need
    // extra instructions (ranges match the SH4 displacement encodings —
    // NOTE(review): confirm against the SH4 assembler).
    if ((dst > 15) || (dst < -16)) {
        insnSpace += 8;
        constSpace += 2;
    }

    if (((dst >= -16) && (dst < 0)) || ((dst > 7) && (dst <= 15)))
        insnSpace += 8;
#endif
    ASSERT(differenceBetween(m_uninterruptedInstructionSequenceBegin, label()) <= insnSpace);
    ASSERT(sizeOfConstantPool() - m_uninterruptedConstantSequenceBegin <= constSpace);
#endif
}
171
172#endif
173
// Publishes the current call frame to the global topCallFrame so runtime code
// can walk the stack, and records the current bytecode position (offset + 1 —
// presumably so that zero can mean "not set"; confirm) in the ArgumentCount
// tag slot.
ALWAYS_INLINE void JIT::updateTopCallFrame()
{
    ASSERT(static_cast<int>(m_bytecodeOffset) >= 0);
    if (m_bytecodeOffset) {
#if USE(JSVALUE32_64)
        // 32-bit stores a pointer into the instruction stream rather than the
        // raw offset.
        storePtr(TrustedImmPtr(m_codeBlock->instructions().begin() + m_bytecodeOffset + 1), intTagFor(JSStack::ArgumentCount));
#else
        store32(TrustedImm32(m_bytecodeOffset + 1), intTagFor(JSStack::ArgumentCount));
#endif
    }
    storePtr(callFrameRegister, &m_globalData->topCallFrame);
}
186
// Points the first argument register at the stack for a call made from
// inside a trampoline, compensating for the return address on x86-32.
ALWAYS_INLINE void JIT::restoreArgumentReferenceForTrampoline()
{
#if CPU(X86)
    // Within a trampoline the return address will be on the stack at this point.
    addPtr(TrustedImm32(sizeof(void*)), stackPointerRegister, firstArgumentRegister);
#elif CPU(ARM)
    move(stackPointerRegister, firstArgumentRegister);
#elif CPU(SH4)
    move(stackPointerRegister, firstArgumentRegister);
#endif
    // In the trampoline on x86-64, the first argument register is not overwritten.
}
barraclough@apple.com23f0c052008-12-16 04:48:16 +0000199
// Compares the structure of the cell in 'reg' against an expected Structure;
// returns a jump taken when they do not match.
ALWAYS_INLINE JIT::Jump JIT::checkStructure(RegisterID reg, Structure* structure)
{
    return branchPtr(NotEqual, Address(reg, JSCell::structureOffset()), TrustedImmPtr(structure));
}
204
// Links the next slow case only when operand 'vReg' might not be a cell; if
// it is statically known to be non-immediate no slow case was emitted for it.
ALWAYS_INLINE void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        linkSlowCase(iter);
}
210
// Records 'jump' as a slow-path entry for the current bytecode offset.
ALWAYS_INLINE void JIT::addSlowCase(Jump jump)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_slowCases.append(SlowCaseEntry(jump, m_bytecodeOffset));
}
217
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000218ALWAYS_INLINE void JIT::addSlowCase(JumpList jumpList)
219{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000220 ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000221
222 const JumpList::JumpVector& jumpVector = jumpList.jumps();
223 size_t size = jumpVector.size();
224 for (size_t i = 0; i < size; ++i)
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000225 m_slowCases.append(SlowCaseEntry(jumpVector[i], m_bytecodeOffset));
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000226}
227
// Records a slow-case entry carrying an unlinked (empty) jump for the current
// bytecode offset.
ALWAYS_INLINE void JIT::addSlowCase()
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    Jump emptyJump; // Doing it this way to make Windows happy.
    m_slowCases.append(SlowCaseEntry(emptyJump, m_bytecodeOffset));
}
235
// Registers 'jump' to be linked later to the bytecode target at
// m_bytecodeOffset + relativeOffset.
ALWAYS_INLINE void JIT::addJump(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    m_jmpTable.append(JumpTable(jump, m_bytecodeOffset + relativeOffset));
}
242
// Links a slow-path jump straight back to the already-emitted hot-path label
// at m_bytecodeOffset + relativeOffset.
ALWAYS_INLINE void JIT::emitJumpSlowToHot(Jump jump, int relativeOffset)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    jump.linkTo(m_labels[m_bytecodeOffset + relativeOffset], this);
}
249
// Returns a jump taken when the type field of the Structure in 'structureReg'
// is below ObjectType, i.e. the value is not an object.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotObject(RegisterID structureReg)
{
    return branch8(Below, Address(structureReg, Structure::typeInfoTypeOffset()), TrustedImm32(ObjectType));
}
254
barraclough@apple.com536c0db2009-05-12 06:21:56 +0000255#if ENABLE(SAMPLING_FLAGS)
// Emits code to set the given 1-based sampling flag bit in the global flags word.
ALWAYS_INLINE void JIT::setSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    or32(TrustedImm32(1u << (flag - 1)), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
262
// Emits code to clear the given 1-based sampling flag bit in the global flags word.
ALWAYS_INLINE void JIT::clearSamplingFlag(int32_t flag)
{
    ASSERT(flag >= 1);
    ASSERT(flag <= 32);
    and32(TrustedImm32(~(1u << (flag - 1))), AbsoluteAddress(SamplingFlags::addressOfFlags()));
}
269#endif
270
271#if ENABLE(SAMPLING_COUNTERS)
// Emits a 64-bit add of 'count' to the sampling counter's memory slot.
ALWAYS_INLINE void JIT::emitCount(AbstractSamplingCounter& counter, int32_t count)
{
    add64(TrustedImm32(count), AbsoluteAddress(counter.addressOfCounter()));
}
276#endif
277
278#if ENABLE(OPCODE_SAMPLING)
mjs@apple.comcc668212010-01-04 11:38:56 +0000279#if CPU(X86_64)
// Records the currently executing instruction in the sampler's slot. On
// x86-64 the slot address is first materialized in ecx — presumably because
// a 64-bit absolute address cannot be used directly as a store target
// (confirm against the MacroAssembler).
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    move(TrustedImmPtr(m_interpreter->sampler()->sampleSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), X86Registers::ecx);
}
285#else
// Records the currently executing instruction in the sampler's slot with a
// direct absolute-address store (non-x86-64 targets).
ALWAYS_INLINE void JIT::sampleInstruction(Instruction* instruction, bool inHostFunction)
{
    storePtr(TrustedImmPtr(m_interpreter->sampler()->encodeSample(instruction, inHostFunction)), m_interpreter->sampler()->sampleSlot());
}
290#endif
291#endif
292
293#if ENABLE(CODEBLOCK_SAMPLING)
mjs@apple.comcc668212010-01-04 11:38:56 +0000294#if CPU(X86_64)
// Records the currently executing CodeBlock in the sampler's slot; x86-64
// variant materializes the slot address in ecx first (see sampleInstruction).
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    move(TrustedImmPtr(m_interpreter->sampler()->codeBlockSlot()), X86Registers::ecx);
    storePtr(TrustedImmPtr(codeBlock), X86Registers::ecx);
}
300#else
// Records the currently executing CodeBlock in the sampler's slot with a
// direct absolute-address store (non-x86-64 targets).
ALWAYS_INLINE void JIT::sampleCodeBlock(CodeBlock* codeBlock)
{
    storePtr(TrustedImmPtr(codeBlock), m_interpreter->sampler()->codeBlockSlot());
}
305#endif
306#endif
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000307
oliver@apple.com5230bd32010-05-06 19:39:54 +0000308ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
309{
310 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
311}
312
// Emits the inline fast path for allocating a JS object: pops the head of the
// matching free list into 'result' and initializes the cell, taking the slow
// case when the free list is empty. 'storagePtr' is used as scratch.
template <typename ClassType, MarkedBlock::DestructorType destructorType, typename StructureType> inline void JIT::emitAllocateBasicJSObject(StructureType structure, RegisterID result, RegisterID storagePtr)
{
    size_t size = ClassType::allocationSize(INLINE_STORAGE_CAPACITY);
    // Select the allocator that matches the class's destructor behavior.
    MarkedAllocator* allocator = 0;
    if (destructorType == MarkedBlock::Normal)
        allocator = &m_globalData->heap.allocatorForObjectWithNormalDestructor(size);
    else if (destructorType == MarkedBlock::ImmortalStructure)
        allocator = &m_globalData->heap.allocatorForObjectWithImmortalStructureDestructor(size);
    else
        allocator = &m_globalData->heap.allocatorForObjectWithoutDestructor(size);
    loadPtr(&allocator->m_freeList.head, result);
    // Empty free list: fall back to the slow-path allocator.
    addSlowCase(branchTestPtr(Zero, result));

    // remove the object from the free list
    loadPtr(Address(result), storagePtr);
    storePtr(storagePtr, &allocator->m_freeList.head);

    // initialize the object's structure
    storePtr(structure, Address(result, JSCell::structureOffset()));

    // initialize the object's property storage pointer
    storePtr(TrustedImmPtr(0), Address(result, JSObject::butterflyOffset()));
}
336
// Convenience wrapper: inline-allocates a JSFinalObject (no destructor).
template <typename T> inline void JIT::emitAllocateJSFinalObject(T structure, RegisterID result, RegisterID scratch)
{
    emitAllocateBasicJSObject<JSFinalObject, MarkedBlock::None, T>(structure, result, scratch);
}
341
fpizlo@apple.com95a9f0d2011-08-20 02:17:49 +0000342#if ENABLE(VALUE_PROFILER)
// Emits code that records the value currently in regT0 (and its tag in regT1
// on 32-bit) into the given ValueProfile's buckets. Clobbers regT3 and
// advances bucketCounterRegister in the multi-bucket configuration.
inline void JIT::emitValueProfilingSite(ValueProfile* valueProfile)
{
    ASSERT(shouldEmitProfiling());
    ASSERT(valueProfile);

    const RegisterID value = regT0;
#if USE(JSVALUE32_64)
    const RegisterID valueTag = regT1;
#endif
    const RegisterID scratch = regT3;

    if (ValueProfile::numberOfBuckets == 1) {
        // We're in a simple configuration: only one bucket, so we can just do a direct
        // store.
#if USE(JSVALUE64)
        store64(value, valueProfile->m_buckets);
#else
        EncodedValueDescriptor* descriptor = bitwise_cast<EncodedValueDescriptor*>(valueProfile->m_buckets);
        store32(value, &descriptor->asBits.payload);
        store32(valueTag, &descriptor->asBits.tag);
#endif
        return;
    }

    // Pseudo-randomly step the bucket cursor by 1 or 3, then mask it into
    // range, so successive samples spread across the buckets.
    if (m_randomGenerator.getUint32() & 1)
        add32(TrustedImm32(1), bucketCounterRegister);
    else
        add32(TrustedImm32(3), bucketCounterRegister);
    and32(TrustedImm32(ValueProfile::bucketIndexMask), bucketCounterRegister);
    move(TrustedImmPtr(valueProfile->m_buckets), scratch);
#if USE(JSVALUE64)
    store64(value, BaseIndex(scratch, bucketCounterRegister, TimesEight));
#elif USE(JSVALUE32_64)
    store32(value, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
    store32(valueTag, BaseIndex(scratch, bucketCounterRegister, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
#endif
}
fpizlo@apple.com086d2af2011-12-21 02:29:15 +0000380
// Emits a value-profiling site for the profile associated with the given
// bytecode offset; does nothing when profiling is disabled.
inline void JIT::emitValueProfilingSite(unsigned bytecodeOffset)
{
    if (!shouldEmitProfiling())
        return;
    emitValueProfilingSite(m_codeBlock->valueProfileForBytecodeOffset(bytecodeOffset));
}
387
// Convenience overload: profiles at the current bytecode offset.
inline void JIT::emitValueProfilingSite()
{
    emitValueProfilingSite(m_bytecodeOffset);
}
fpizlo@apple.comc7be5be02012-09-17 19:07:32 +0000392#endif // ENABLE(VALUE_PROFILER)
393
// Records the structure in 'structureAndIndexingType' into the array profile
// (only when this code block can tier up), then overwrites that same register
// with the structure's indexing type.
inline void JIT::emitArrayProfilingSite(RegisterID structureAndIndexingType, RegisterID scratch, ArrayProfile* arrayProfile)
{
    UNUSED_PARAM(scratch); // We had found this scratch register useful here before, so I will keep it for now.

    // One register plays both roles: structure on entry, indexing type on exit.
    RegisterID structure = structureAndIndexingType;
    RegisterID indexingType = structureAndIndexingType;

    if (canBeOptimized())
        storePtr(structure, arrayProfile->addressOfLastSeenStructure());

    load8(Address(structure, Structure::indexingTypeOffset()), indexingType);
}
406
// Array-profiling site keyed by bytecode index: looks up (or creates) the
// profile when the value profiler is enabled; otherwise profiles into a null
// profile, which still loads the indexing type.
inline void JIT::emitArrayProfilingSiteForBytecodeIndex(RegisterID structureAndIndexingType, RegisterID scratch, unsigned bytecodeIndex)
{
#if ENABLE(VALUE_PROFILER)
    emitArrayProfilingSite(structureAndIndexingType, scratch, m_codeBlock->getOrAddArrayProfile(bytecodeIndex));
#else
    UNUSED_PARAM(bytecodeIndex);
    emitArrayProfilingSite(structureAndIndexingType, scratch, 0);
#endif
}
fpizlo@apple.com95a9f0d2011-08-20 02:17:49 +0000416
// Emits code flagging in the array profile that this site stored into a hole.
inline void JIT::emitArrayProfileStoreToHoleSpecialCase(ArrayProfile* arrayProfile)
{
#if ENABLE(VALUE_PROFILER)
    store8(TrustedImm32(1), arrayProfile->addressOfMayStoreToHole());
#else
    UNUSED_PARAM(arrayProfile);
#endif
}
425
// Emits code flagging in the array profile that this site accessed out of bounds.
inline void JIT::emitArrayProfileOutOfBoundsSpecialCase(ArrayProfile* arrayProfile)
{
#if ENABLE(VALUE_PROFILER)
    store8(TrustedImm32(1), arrayProfile->addressOfOutOfBounds());
#else
    UNUSED_PARAM(arrayProfile);
#endif
}
434
// Returns whether the observed array modes include the given shape
// capability; always false when the value profiler is disabled.
static inline bool arrayProfileSaw(ArrayModes arrayModes, IndexingType capability)
{
#if ENABLE(VALUE_PROFILER)
    return arrayModesInclude(arrayModes, capability);
#else
    UNUSED_PARAM(arrayModes);
    UNUSED_PARAM(capability);
    return false;
#endif
}
445
// Chooses a JIT array-access specialization from the profile's observed array
// modes, checking the most specific shapes first. Without the value profiler,
// always uses the generic contiguous mode.
inline JITArrayMode JIT::chooseArrayMode(ArrayProfile* profile)
{
#if ENABLE(VALUE_PROFILER)
    profile->computeUpdatedPrediction(m_codeBlock);
    ArrayModes arrayModes = profile->observedArrayModes();
    if (arrayProfileSaw(arrayModes, DoubleShape))
        return JITDouble;
    if (arrayProfileSaw(arrayModes, Int32Shape))
        return JITInt32;
    if (arrayProfileSaw(arrayModes, ArrayStorageShape))
        return JITArrayStorage;
    return JITContiguous;
#else
    UNUSED_PARAM(profile);
    return JITContiguous;
#endif
}
463
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000464#if USE(JSVALUE32_64)
465
ggaren@apple.come1e45912011-11-14 19:44:32 +0000466inline void JIT::emitLoadTag(int index, RegisterID tag)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000467{
468 RegisterID mappedTag;
469 if (getMappedTag(index, mappedTag)) {
470 move(mappedTag, tag);
471 unmap(tag);
472 return;
473 }
474
475 if (m_codeBlock->isConstantRegisterIndex(index)) {
476 move(Imm32(getConstantOperand(index).tag()), tag);
477 unmap(tag);
478 return;
479 }
480
481 load32(tagFor(index), tag);
482 unmap(tag);
483}
484
ggaren@apple.come1e45912011-11-14 19:44:32 +0000485inline void JIT::emitLoadPayload(int index, RegisterID payload)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000486{
487 RegisterID mappedPayload;
488 if (getMappedPayload(index, mappedPayload)) {
489 move(mappedPayload, payload);
490 unmap(payload);
491 return;
492 }
493
494 if (m_codeBlock->isConstantRegisterIndex(index)) {
495 move(Imm32(getConstantOperand(index).payload()), payload);
496 unmap(payload);
497 return;
498 }
499
500 load32(payloadFor(index), payload);
501 unmap(payload);
502}
503
// Materializes the constant JSValue 'v' into a tag/payload register pair.
inline void JIT::emitLoad(const JSValue& v, RegisterID tag, RegisterID payload)
{
    move(Imm32(v.payload()), payload);
    move(Imm32(v.tag()), tag);
}
509
// Loads virtual register 'index' (relative to 'base') into a tag/payload
// register pair, ordering the two loads so that neither overwrites 'base'
// before it has been used.
inline void JIT::emitLoad(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    RELEASE_ASSERT(tag != payload);

    if (base == callFrameRegister) {
        RELEASE_ASSERT(payload != base);
        // Go through the mapping-aware helpers for the call frame.
        emitLoadPayload(index, payload);
        emitLoadTag(index, tag);
        return;
    }

    if (payload == base) { // avoid stomping base
        load32(tagFor(index, base), tag);
        load32(payloadFor(index, base), payload);
        return;
    }

    load32(payloadFor(index, base), payload);
    load32(tagFor(index, base), tag);
}
530
ggaren@apple.come1e45912011-11-14 19:44:32 +0000531inline void JIT::emitLoad2(int index1, RegisterID tag1, RegisterID payload1, int index2, RegisterID tag2, RegisterID payload2)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000532{
533 if (isMapped(index1)) {
534 emitLoad(index1, tag1, payload1);
535 emitLoad(index2, tag2, payload2);
536 return;
537 }
538 emitLoad(index2, tag2, payload2);
539 emitLoad(index1, tag1, payload1);
540}
541
// Loads virtual register 'index' as a double into 'value', reading straight
// from the constant pool for constant operands.
inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}
550
// Converts the int32 payload of virtual register 'index' into a double in
// 'value', reading the payload from the constant pool for constant operands.
inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        // Address the payload half of the boxed constant directly.
        char* bytePointer = reinterpret_cast<char*>(&inConstantPool);
        convertInt32ToDouble(AbsoluteAddress(bytePointer + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), value);
    } else
        convertInt32ToDouble(payloadFor(index), value);
}
560
// Stores a tag/payload register pair into the virtual register's slot,
// addressed relative to base.
inline void JIT::emitStore(int index, RegisterID tag, RegisterID payload, RegisterID base)
{
    store32(payload, payloadFor(index, base));
    store32(tag, tagFor(index, base));
}
566
// Stores an int32 payload into a virtual register. The Int32Tag is written
// only when the slot is not already known to hold an int32 tag
// (indexIsInt32 == false), saving one store otherwise.
inline void JIT::emitStoreInt32(int index, RegisterID payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
573
// Stores an int32 into a virtual register and then records tag/payload as a
// register cache of that slot for the bytecode offset following this opcode.
// Assumes the caller's tag register holds the value's tag — TODO(review): confirm.
inline void JIT::emitStoreAndMapInt32(int index, RegisterID tag, RegisterID payload, bool indexIsInt32, size_t opcodeLength)
{
    emitStoreInt32(index, payload, indexIsInt32);
    map(m_bytecodeOffset + opcodeLength, index, tag, payload);
}
579
// Stores an immediate int32 payload into a virtual register; as with the
// register-payload overload, the Int32Tag store is skipped when the slot is
// already known to carry an int32 tag.
inline void JIT::emitStoreInt32(int index, TrustedImm32 payload, bool indexIsInt32)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsInt32)
        store32(TrustedImm32(JSValue::Int32Tag), tagFor(index, callFrameRegister));
}
586
// Stores a cell payload into a virtual register, writing the CellTag only
// when the slot is not already known to be tagged as a cell.
inline void JIT::emitStoreCell(int index, RegisterID payload, bool indexIsCell)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsCell)
        store32(TrustedImm32(JSValue::CellTag), tagFor(index, callFrameRegister));
}
593
// Stores a boolean payload into a virtual register, writing the BooleanTag
// only when the slot is not already known to hold a boolean.
inline void JIT::emitStoreBool(int index, RegisterID payload, bool indexIsBool)
{
    store32(payload, payloadFor(index, callFrameRegister));
    if (!indexIsBool)
        store32(TrustedImm32(JSValue::BooleanTag), tagFor(index, callFrameRegister));
}
600
// Stores an FP register into the full 64-bit (tag + payload) slot of a
// virtual register in one double store.
inline void JIT::emitStoreDouble(int index, FPRegisterID value)
{
    storeDouble(value, addressFor(index));
}
605
// Stores a compile-time-known JSValue into a virtual register as two 32-bit
// immediate stores. Imm32 (rather than TrustedImm32) is used, presumably
// because the constant's bits can be influenced by the compiled program —
// TODO(review): confirm against the assembler's Imm32 blinding policy.
inline void JIT::emitStore(int index, const JSValue constant, RegisterID base)
{
    store32(Imm32(constant.payload()), payloadFor(index, base));
    store32(Imm32(constant.tag()), tagFor(index, base));
}
611
// Initializes a virtual register slot to the undefined value.
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    emitStore(dst, jsUndefined());
}
616
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000617inline bool JIT::isLabeled(unsigned bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000618{
619 for (size_t numberOfJumpTargets = m_codeBlock->numberOfJumpTargets(); m_jumpTargetIndex != numberOfJumpTargets; ++m_jumpTargetIndex) {
620 unsigned jumpTarget = m_codeBlock->jumpTarget(m_jumpTargetIndex);
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000621 if (jumpTarget == bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000622 return true;
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000623 if (jumpTarget > bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000624 return false;
625 }
626 return false;
627}
628
ggaren@apple.come1e45912011-11-14 19:44:32 +0000629inline void JIT::map(unsigned bytecodeOffset, int virtualRegisterIndex, RegisterID tag, RegisterID payload)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000630{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000631 if (isLabeled(bytecodeOffset))
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000632 return;
633
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000634 m_mappedBytecodeOffset = bytecodeOffset;
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000635 m_mappedVirtualRegisterIndex = virtualRegisterIndex;
636 m_mappedTag = tag;
637 m_mappedPayload = payload;
fpizlo@apple.com75461572012-02-12 01:24:49 +0000638
639 ASSERT(!canBeOptimized() || m_mappedPayload == regT0);
640 ASSERT(!canBeOptimized() || m_mappedTag == regT1);
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000641}
642
643inline void JIT::unmap(RegisterID registerID)
644{
645 if (m_mappedTag == registerID)
646 m_mappedTag = (RegisterID)-1;
647 else if (m_mappedPayload == registerID)
648 m_mappedPayload = (RegisterID)-1;
649}
650
651inline void JIT::unmap()
652{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000653 m_mappedBytecodeOffset = (unsigned)-1;
mark.lam@apple.com4fbb9c32012-10-09 07:12:56 +0000654 m_mappedVirtualRegisterIndex = JSStack::ReturnPC;
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000655 m_mappedTag = (RegisterID)-1;
656 m_mappedPayload = (RegisterID)-1;
657}
658
ggaren@apple.come1e45912011-11-14 19:44:32 +0000659inline bool JIT::isMapped(int virtualRegisterIndex)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000660{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000661 if (m_mappedBytecodeOffset != m_bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000662 return false;
663 if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
664 return false;
665 return true;
666}
667
ggaren@apple.come1e45912011-11-14 19:44:32 +0000668inline bool JIT::getMappedPayload(int virtualRegisterIndex, RegisterID& payload)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000669{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000670 if (m_mappedBytecodeOffset != m_bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000671 return false;
672 if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
673 return false;
674 if (m_mappedPayload == (RegisterID)-1)
675 return false;
676 payload = m_mappedPayload;
677 return true;
678}
679
ggaren@apple.come1e45912011-11-14 19:44:32 +0000680inline bool JIT::getMappedTag(int virtualRegisterIndex, RegisterID& tag)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000681{
ggaren@apple.com1ba49812010-05-19 18:28:54 +0000682 if (m_mappedBytecodeOffset != m_bytecodeOffset)
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000683 return false;
684 if (m_mappedVirtualRegisterIndex != virtualRegisterIndex)
685 return false;
686 if (m_mappedTag == (RegisterID)-1)
687 return false;
688 tag = m_mappedTag;
689 return true;
690}
691
// Emits a slow-path branch unless the virtual register is statically known
// to hold a cell. A constant register reaching here is treated as an
// unconditional slow case (an unconditional jump is added).
inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
    }
}
701
// As above, but the value's tag is already loaded in a machine register, so
// the cell check is a direct compare of tag against CellTag.
inline void JIT::emitJumpSlowCaseIfNotJSCell(int virtualRegisterIndex, RegisterID tag)
{
    if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
        if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
            addSlowCase(jump());
        else
            addSlowCase(branch32(NotEqual, tag, TrustedImm32(JSValue::CellTag)));
    }
}
711
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000712ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
713{
714 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
715}
716
717ALWAYS_INLINE bool JIT::getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant)
718{
719 if (isOperandConstantImmediateInt(op1)) {
720 constant = getConstantOperand(op1).asInt32();
721 op = op2;
722 return true;
723 }
724
725 if (isOperandConstantImmediateInt(op2)) {
726 constant = getConstantOperand(op2).asInt32();
727 op = op1;
728 return true;
729 }
730
731 return false;
732}
733
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000734#else // USE(JSVALUE32_64)
735
yuqiang.xian@intel.com5b1cb732012-10-19 05:46:10 +0000736/* Deprecated: Please use JITStubCall instead. */
737
738ALWAYS_INLINE void JIT::emitGetJITStubArg(unsigned argumentNumber, RegisterID dst)
739{
740 unsigned argumentStackOffset = (argumentNumber * (sizeof(JSValue) / sizeof(void*))) + JITSTACKFRAME_ARGS_INDEX;
741 peek64(dst, argumentStackOffset);
742}
743
// Invalidates the cached association between cachedResultRegister and the
// bytecode register whose value it holds.
ALWAYS_INLINE void JIT::killLastResultRegister()
{
    m_lastResultBytecodeRegister = std::numeric_limits<int>::max();
}
748
// Loads a virtual register into a hardware register, using three strategies:
// constants are materialized as immediates, a hit on the last-result cache
// is satisfied by a register move, and everything else is loaded from the
// call frame. Every path kills the last-result cache.
ALWAYS_INLINE void JIT::emitGetVirtualRegister(int src, RegisterID dst)
{
    ASSERT(m_bytecodeOffset != (unsigned)-1); // This method should only be called during hot/cold path generation, so that m_bytecodeOffset is set.

    // TODO: we want to reuse values that are already in registers if we can - add a register allocator!
    if (m_codeBlock->isConstantRegisterIndex(src)) {
        JSValue value = m_codeBlock->getConstant(src);
        // Non-numbers use TrustedImm64 (no blinding needed); numbers use
        // Imm64, presumably because their bits are program-controlled —
        // TODO(review): confirm against the assembler's blinding policy.
        if (!value.isNumber())
            move(TrustedImm64(JSValue::encode(value)), dst);
        else
            move(Imm64(JSValue::encode(value)), dst);
        killLastResultRegister();
        return;
    }

    // Cache hit only for temporaries, and never across a jump target where
    // register contents are not guaranteed.
    if (src == m_lastResultBytecodeRegister && m_codeBlock->isTemporaryRegisterIndex(src) && !atJumpTarget()) {
        // The argument we want is already stored in eax
        if (dst != cachedResultRegister)
            move(cachedResultRegister, dst);
        killLastResultRegister();
        return;
    }

    load64(Address(callFrameRegister, src * sizeof(Register)), dst);
    killLastResultRegister();
}
776
// Loads two virtual registers. If src2 is the one held in the last-result
// cache, it is loaded first, because emitGetVirtualRegister kills the cache
// on every call and a src2 hit would otherwise be lost.
ALWAYS_INLINE void JIT::emitGetVirtualRegisters(int src1, RegisterID dst1, int src2, RegisterID dst2)
{
    if (src2 == m_lastResultBytecodeRegister) {
        emitGetVirtualRegister(src2, dst2);
        emitGetVirtualRegister(src1, dst1);
    } else {
        emitGetVirtualRegister(src1, dst1);
        emitGetVirtualRegister(src2, dst2);
    }
}
787
// Returns the int32 value of a constant operand. The caller must have
// established isOperandConstantImmediateInt(src).
ALWAYS_INLINE int32_t JIT::getConstantOperandImmediateInt(unsigned src)
{
    return getConstantOperand(src).asInt32();
}
792
793ALWAYS_INLINE bool JIT::isOperandConstantImmediateInt(unsigned src)
794{
795 return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isInt32();
796}
797
// Stores a hardware register into a virtual register slot and updates the
// last-result cache: when the source is cachedResultRegister, dst becomes
// the cached bytecode register; otherwise the cache is invalidated.
ALWAYS_INLINE void JIT::emitPutVirtualRegister(unsigned dst, RegisterID from)
{
    store64(from, Address(callFrameRegister, dst * sizeof(Register)));
    m_lastResultBytecodeRegister = (from == cachedResultRegister) ? static_cast<int>(dst) : std::numeric_limits<int>::max();
}
803
// Initializes a virtual register slot to the encoded undefined value.
ALWAYS_INLINE void JIT::emitInitRegister(unsigned dst)
{
    store64(TrustedImm64(JSValue::encode(jsUndefined())), Address(callFrameRegister, dst * sizeof(Register)));
}
808
// Branches if reg holds a cell: in the 64-bit value encoding a cell pointer
// has none of the tag-mask bits set, so (reg & tagMask) == 0 identifies it.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfJSCell(RegisterID reg)
{
    return branchTest64(Zero, reg, tagMaskRegister);
}
813
// Branches if both registers hold cells. ORing the two values into scratch
// accumulates every tag bit from either value, so the combined value passes
// the cell test only when both inputs do. Clobbers scratch.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfBothJSCells(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    or64(reg2, scratch);
    return emitJumpIfJSCell(scratch);
}
820
// Adds a slow-path branch taken when reg holds a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfJSCell(reg));
}
825
// Adds a slow-path branch taken when reg does not hold a cell.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg)
{
    addSlowCase(emitJumpIfNotJSCell(reg));
}
830
// As above, but elides the check entirely when the virtual register vReg is
// statically known not to hold an immediate.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotJSCell(RegisterID reg, int vReg)
{
    if (!m_codeBlock->isKnownNotImmediate(vReg))
        emitJumpSlowCaseIfNotJSCell(reg);
}
836
// Loads the double value of a virtual register into an FP register.
// Constants are read straight from the CodeBlock's constant pool; other
// indices from their call-frame slot.
inline void JIT::emitLoadDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        WriteBarrier<Unknown>& inConstantPool = m_codeBlock->constantRegister(index);
        loadDouble(&inConstantPool, value);
    } else
        loadDouble(addressFor(index), value);
}
845
// Converts the int32 value of a virtual register to a double in an FP
// register. A constant must be an int32 constant (asserted) and is
// converted from its immediate value; otherwise the conversion reads the
// call-frame slot.
inline void JIT::emitLoadInt32ToDouble(int index, FPRegisterID value)
{
    if (m_codeBlock->isConstantRegisterIndex(index)) {
        ASSERT(isOperandConstantImmediateInt(index));
        convertInt32ToDouble(Imm32(getConstantOperand(index).asInt32()), value);
    } else
        convertInt32ToDouble(addressFor(index), value);
}
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000854
// Branches if reg holds an immediate int32: in the 64-bit encoding int32
// values carry the full TagTypeNumber pattern in their upper bits, so an
// unsigned compare >= tagTypeNumberRegister identifies them.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateInteger(RegisterID reg)
{
    return branch64(AboveOrEqual, reg, tagTypeNumberRegister);
}
859
// Branches if reg does not hold an immediate int32 (the inverse of
// emitJumpIfImmediateInteger's unsigned compare).
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateInteger(RegisterID reg)
{
    return branch64(Below, reg, tagTypeNumberRegister);
}
864
// Branches unless both registers hold immediate int32s. ANDing the values
// into scratch preserves the TagTypeNumber bits only if both inputs have
// them all set. Clobbers scratch.
ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    move(reg1, scratch);
    and64(reg2, scratch);
    return emitJumpIfNotImmediateInteger(scratch);
}
871
// Adds a slow-path branch taken when reg is not an immediate int32.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateInteger(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateInteger(reg));
}
876
// Adds a slow-path branch taken unless both registers hold immediate
// int32s. Clobbers scratch.
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateIntegers(RegisterID reg1, RegisterID reg2, RegisterID scratch)
{
    addSlowCase(emitJumpIfNotImmediateIntegers(reg1, reg2, scratch));
}
881
// Adds a slow-path branch taken when reg is not an immediate number
// (neither int32 nor double).
ALWAYS_INLINE void JIT::emitJumpSlowCaseIfNotImmediateNumber(RegisterID reg)
{
    addSlowCase(emitJumpIfNotImmediateNumber(reg));
}
886
// Re-tags an integer in src as an immediate int32 JSValue in dest; no check
// is performed that src actually holds an int32-range value.
ALWAYS_INLINE void JIT::emitFastArithReTagImmediate(RegisterID src, RegisterID dest)
{
    emitFastArithIntToImmNoCheck(src, dest);
}
891
// Converts a 0/1 payload in reg into an encoded boolean JSValue by ORing in
// the ValueFalse pattern (0 -> false, 1 -> true). Assumes reg holds 0 or 1
// — TODO(review): confirm all callers guarantee this.
ALWAYS_INLINE void JIT::emitTagAsBoolImmediate(RegisterID reg)
{
    or32(TrustedImm32(static_cast<int32_t>(ValueFalse)), reg);
}
896
ggaren@apple.com540d71a62009-07-30 20:57:44 +0000897#endif // USE(JSVALUE32_64)
898
899} // namespace JSC
900
barraclough@apple.come367b002008-12-04 05:43:14 +0000901#endif // ENABLE(JIT)
902
mark.lam@apple.coma4fe7ab2012-11-09 03:03:44 +0000903#endif // JITInlines_h
904