/*
 * Copyright (C) 2011-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "Repatch.h"

#if ENABLE(JIT)

#include "BinarySwitch.h"
#include "CCallHelpers.h"
#include "CallFrameShuffler.h"
#include "DFGOperations.h"
#include "DFGSpeculativeJIT.h"
#include "DOMJITGetterSetter.h"
#include "DirectArguments.h"
#include "FTLThunks.h"
#include "GCAwareJITStubRoutine.h"
#include "GetterSetter.h"
#include "ICStats.h"
#include "InlineAccess.h"
#include "JIT.h"
#include "JITInlines.h"
#include "LinkBuffer.h"
#include "JSCInlines.h"
#include "PolymorphicAccess.h"
#include "ScopedArguments.h"
#include "ScratchRegisterAllocator.h"
#include "StackAlignment.h"
#include "StructureRareDataInlines.h"
#include "StructureStubClearingWatchpoint.h"
#include "StructureStubInfo.h"
#include "ThunkGenerators.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
#include <wtf/StringPrintStream.h>

namespace JSC {

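// Repatch implements the runtime half of JIT inline caching: it builds and installs
// AccessCase-based stubs for get_by_id, put_by_id, and "in", and it links, unlinks, and
// repatches call sites (monomorphic, polymorphic, virtual, and direct calls). The helpers
// below are best-effort: any of them may decide to give up on caching and fall back to the
// generic slow-path operation for the site.
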
static FunctionPtr readCallTarget(CodeBlock* codeBlock, CodeLocationCall call)
{
    FunctionPtr result = MacroAssembler::readCallTarget(call);
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        return FunctionPtr(codeBlock->vm()->ftlThunks->keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                result.executableAddress())).callTarget());
    }
#else
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    return result;
}

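// In FTL code, slow-path calls go through a thunk, so the raw call target read from the
// instruction stream is the thunk rather than the operation. readCallTarget() above maps the
// thunk back to the underlying operation, and ftlThunkAwareRepatchCall() below rebuilds the
// thunk key with the new callee before repatching the call.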
void ftlThunkAwareRepatchCall(CodeBlock* codeBlock, CodeLocationCall call, FunctionPtr newCalleeFunction)
{
#if ENABLE(FTL_JIT)
    if (codeBlock->jitType() == JITCode::FTLJIT) {
        VM& vm = *codeBlock->vm();
        FTL::Thunks& thunks = *vm.ftlThunks;
        FTL::SlowPathCallKey key = thunks.keyForSlowPathCallThunk(
            MacroAssemblerCodePtr::createFromExecutableAddress(
                MacroAssembler::readCallTarget(call).executableAddress()));
        key = key.withCallTarget(newCalleeFunction.executableAddress());
        newCalleeFunction = FunctionPtr(
            thunks.getSlowPathCallThunk(vm, key).code().executableAddress());
    }
#else // ENABLE(FTL_JIT)
    UNUSED_PARAM(codeBlock);
#endif // ENABLE(FTL_JIT)
    MacroAssembler::repatchCall(call, newCalleeFunction);
}

enum InlineCacheAction {
    GiveUpOnCache,
    RetryCacheLater,
    AttemptToCache
};

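// Decide whether property accesses on this cell's structure are worth caching. Uncacheable
// dictionaries get one chance: flatten and retry later; if the structure has already been
// flattened before, give up so we do not flatten repeatedly.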
static InlineCacheAction actionForCell(VM& vm, JSCell* cell)
{
    Structure* structure = cell->structure(vm);

    TypeInfo typeInfo = structure->typeInfo();
    if (typeInfo.prohibitsPropertyCaching())
        return GiveUpOnCache;

    if (structure->isUncacheableDictionary()) {
        if (structure->hasBeenFlattenedBefore())
            return GiveUpOnCache;
        // Flattening could have changed the offset, so return early for another try.
        asObject(cell)->flattenDictionaryObject(vm);
        return RetryCacheLater;
    }

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    return AttemptToCache;
}

static bool forceICFailure(ExecState*)
{
#if CPU(ARM_TRADITIONAL)
    // FIXME: Remove this workaround once the proper fixes are landed.
    // [ARM] Disable Inline Caching on ARMv7 traditional until proper fix
    // https://bugs.webkit.org/show_bug.cgi?id=159759
    return true;
#else
    return Options::forceICFailure();
#endif
}

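// Pick the slow-path operation that matches the flavor of get_by_id being patched: the normal
// get_by_id operations for GetByIDKind::Normal, and the try_get_by_id variants otherwise.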
inline J_JITOperation_ESsiJI appropriateOptimizingGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetByIdOptimize;
    return operationTryGetByIdOptimize;
}

inline J_JITOperation_ESsiJI appropriateGenericGetByIdFunction(GetByIDKind kind)
{
    if (kind == GetByIDKind::Normal)
        return operationGetById;
    return operationTryGetById;
}

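// Try to add an IC case for a get_by_id. Roughly: handle the special "length" cases first
// (arrays, strings, and arguments objects), then attempt to patch a simple self access directly
// into the inline code via InlineAccess, and otherwise build an AccessCase (load, miss, getter,
// custom getter, or intrinsic getter) and hand it to the stub's access-case list.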
static InlineCacheAction tryCacheGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    // FIXME: Cache property access for immediates.
    if (!baseValue.isCell())
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    std::unique_ptr<AccessCase> newCase;

    if (propertyName == vm.propertyNames->length) {
        if (isJSArray(baseValue)) {
            if (stubInfo.cacheType == CacheType::Unset
                && slot.slotBase() == baseValue
                && InlineAccess::isCacheableArrayLength(stubInfo, jsCast<JSArray*>(baseValue))) {

                bool generatedCodeInline = InlineAccess::generateArrayLength(*codeBlock->vm(), stubInfo, jsCast<JSArray*>(baseValue));
                if (generatedCodeInline) {
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                    stubInfo.initArrayLength();
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ArrayLength);
        } else if (isJSString(baseValue))
            newCase = AccessCase::getLength(vm, codeBlock, AccessCase::StringLength);
        else if (DirectArguments* arguments = jsDynamicCast<DirectArguments*>(baseValue)) {
            // If there were overrides, then we can handle this as a normal property load! Guarding
            // this with such a check enables us to add an IC case for that load if needed.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::DirectArgumentsLength);
        } else if (ScopedArguments* arguments = jsDynamicCast<ScopedArguments*>(baseValue)) {
            // Ditto.
            if (!arguments->overrodeThings())
                newCase = AccessCase::getLength(vm, codeBlock, AccessCase::ScopedArgumentsLength);
        }
    }

    if (!newCase) {
        if (!slot.isCacheable() && !slot.isUnset())
            return GiveUpOnCache;

        ObjectPropertyConditionSet conditionSet;
        JSCell* baseCell = baseValue.asCell();
        Structure* structure = baseCell->structure(vm);

        bool loadTargetFromProxy = false;
        if (baseCell->type() == PureForwardingProxyType) {
            baseValue = jsCast<JSProxy*>(baseCell)->target();
            baseCell = baseValue.asCell();
            structure = baseCell->structure(vm);
            loadTargetFromProxy = true;
        }

        InlineCacheAction action = actionForCell(vm, baseCell);
        if (action != AttemptToCache)
            return action;

        // Optimize self access.
        if (stubInfo.cacheType == CacheType::Unset
            && slot.isCacheableValue()
            && slot.slotBase() == baseValue
            && !slot.watchpointSet()
            && !structure->needImpurePropertyWatchpoint()
            && !loadTargetFromProxy) {

            bool generatedCodeInline = InlineAccess::generateSelfPropertyAccess(*codeBlock->vm(), stubInfo, structure, slot.cachedOffset());
            if (generatedCodeInline) {
                LOG_IC((ICEvent::GetByIdSelfPatch, structure->classInfo(), propertyName));
                structure->startWatchingPropertyForReplacements(vm, slot.cachedOffset());
                ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
                stubInfo.initGetByIdSelf(codeBlock, structure, slot.cachedOffset());
                return RetryCacheLater;
            }
        }

        PropertyOffset offset = slot.isUnset() ? invalidOffset : slot.cachedOffset();

        if (slot.isUnset() || slot.slotBase() != baseValue) {
            if (structure->typeInfo().prohibitsPropertyCaching())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseCell));
            }

            if (slot.isUnset() && structure->typeInfo().getOwnPropertySlotIsImpureForPropertyAbsence())
                return GiveUpOnCache;

            if (slot.isUnset()) {
                conditionSet = generateConditionsForPropertyMiss(
                    vm, codeBlock, exec, structure, propertyName.impl());
            } else {
                conditionSet = generateConditionsForPrototypePropertyHit(
                    vm, codeBlock, exec, structure, slot.slotBase(),
                    propertyName.impl());
            }

            if (!conditionSet.isValid())
                return GiveUpOnCache;

            offset = slot.isUnset() ? invalidOffset : conditionSet.slotBaseCondition().offset();
        }

        JSFunction* getter = nullptr;
        if (slot.isCacheableGetter())
            getter = jsDynamicCast<JSFunction*>(slot.getterSetter()->getter());

        DOMJIT::GetterSetter* domJIT = nullptr;
        if (slot.isCacheableCustom() && slot.domJIT())
            domJIT = slot.domJIT();

        if (kind == GetByIDKind::Pure) {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::GetGetter;
            else
                RELEASE_ASSERT_NOT_REACHED();

            newCase = AccessCase::tryGet(vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy, slot.watchpointSet());
        } else if (!loadTargetFromProxy && getter && AccessCase::canEmitIntrinsicGetter(getter, structure))
            newCase = AccessCase::getIntrinsic(vm, codeBlock, getter, slot.cachedOffset(), structure, conditionSet);
        else {
            AccessCase::AccessType type;
            if (slot.isCacheableValue())
                type = AccessCase::Load;
            else if (slot.isUnset())
                type = AccessCase::Miss;
            else if (slot.isCacheableGetter())
                type = AccessCase::Getter;
            else if (slot.attributes() & CustomAccessor)
                type = AccessCase::CustomAccessorGetter;
            else
                type = AccessCase::CustomValueGetter;

            newCase = AccessCase::get(
                vm, codeBlock, type, offset, structure, conditionSet, loadTargetFromProxy,
                slot.watchpointSet(), slot.isCacheableCustom() ? slot.customGetter() : nullptr,
                slot.isCacheableCustom() ? slot.slotBase() : nullptr,
                domJIT);
        }
    }

    LOG_IC((ICEvent::GetByIdAddAccessCase, baseValue.classInfoOrNull(), propertyName));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, propertyName, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::GetByIdReplaceWithJump, baseValue.classInfoOrNull(), propertyName));

        RELEASE_ASSERT(result.code());
        InlineAccess::rewireStubAsJump(exec->vm(), stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchGetByID(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCacheGetByID(exec, baseValue, propertyName, slot, stubInfo, kind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericGetByIdFunction(kind));
}

static V_JITOperation_ESsiJJI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrict;
        return operationPutByIdStrict;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrict;
    return operationPutByIdNonStrict;
}

static V_JITOperation_ESsiJJI appropriateOptimizingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
    if (slot.isStrictMode()) {
        if (putKind == Direct)
            return operationPutByIdDirectStrictOptimize;
        return operationPutByIdStrictOptimize;
    }
    if (putKind == Direct)
        return operationPutByIdDirectNonStrictOptimize;
    return operationPutByIdNonStrictOptimize;
}

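// Try to add an IC case for a put_by_id. The cases are: replacing an existing property on the
// base itself (optionally patched straight into the inline code), adding a new property via a
// structure transition, or invoking a setter or custom setter, possibly found on the prototype
// chain.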
static InlineCacheAction tryCachePutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();

    if (!baseValue.isCell())
        return GiveUpOnCache;

    if (!slot.isCacheablePut() && !slot.isCacheableCustom() && !slot.isCacheableSetter())
        return GiveUpOnCache;

    if (!structure->propertyAccessesAreCacheable())
        return GiveUpOnCache;

    std::unique_ptr<AccessCase> newCase;

    if (slot.base() == baseValue && slot.isCacheablePut()) {
        if (slot.type() == PutPropertySlot::ExistingProperty) {
            structure->didCachePropertyReplacement(vm, slot.cachedOffset());

            if (stubInfo.cacheType == CacheType::Unset
                && InlineAccess::canGenerateSelfPropertyReplace(stubInfo, slot.cachedOffset())
                && !structure->needImpurePropertyWatchpoint()
                && !structure->inferredTypeFor(ident.impl())) {

                bool generatedCodeInline = InlineAccess::generateSelfPropertyReplace(vm, stubInfo, structure, slot.cachedOffset());
                if (generatedCodeInline) {
                    LOG_IC((ICEvent::PutByIdSelfPatch, structure->classInfo(), ident));
                    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingPutByIdFunction(slot, putKind));
                    stubInfo.initPutByIdReplace(codeBlock, structure, slot.cachedOffset());
                    return RetryCacheLater;
                }
            }

            newCase = AccessCase::replace(vm, codeBlock, structure, slot.cachedOffset());
        } else {
            ASSERT(slot.type() == PutPropertySlot::NewProperty);

            if (!structure->isObject())
                return GiveUpOnCache;

            if (structure->isDictionary()) {
                if (structure->hasBeenFlattenedBefore())
                    return GiveUpOnCache;
                structure->flattenDictionaryStructure(vm, jsCast<JSObject*>(baseValue));
            }

            PropertyOffset offset;
            Structure* newStructure =
                Structure::addPropertyTransitionToExistingStructureConcurrently(
                    structure, ident.impl(), 0, offset);
            if (!newStructure || !newStructure->propertyAccessesAreCacheable())
                return GiveUpOnCache;

            ASSERT(newStructure->previousID() == structure);
            ASSERT(!newStructure->isDictionary());
            ASSERT(newStructure->isObject());

            ObjectPropertyConditionSet conditionSet;
            if (putKind == NotDirect) {
                conditionSet =
                    generateConditionsForPropertySetterMiss(
                        vm, codeBlock, exec, newStructure, ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::transition(vm, codeBlock, structure, newStructure, offset, conditionSet);
        }
    } else if (slot.isCacheableCustom() || slot.isCacheableSetter()) {
        if (slot.isCacheableCustom()) {
            ObjectPropertyConditionSet conditionSet;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHitCustom(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
            }

            newCase = AccessCase::setter(
                vm, codeBlock, slot.isCustomAccessor() ? AccessCase::CustomAccessorSetter : AccessCase::CustomValueSetter, structure, invalidOffset, conditionSet,
                slot.customSetter(), slot.base());
        } else {
            ObjectPropertyConditionSet conditionSet;
            PropertyOffset offset;

            if (slot.base() != baseValue) {
                conditionSet =
                    generateConditionsForPrototypePropertyHit(
                        vm, codeBlock, exec, structure, slot.base(), ident.impl());
                if (!conditionSet.isValid())
                    return GiveUpOnCache;
                offset = conditionSet.slotBaseCondition().offset();
            } else
                offset = slot.cachedOffset();

            newCase = AccessCase::setter(
                vm, codeBlock, AccessCase::Setter, structure, offset, conditionSet);
        }
    }

    LOG_IC((ICEvent::PutByIdAddAccessCase, structure->classInfo(), ident));

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::PutByIdReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        InlineAccess::rewireStubAsJump(vm, stubInfo, CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchPutByID(ExecState* exec, JSValue baseValue, Structure* structure, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
    SuperSamplerScope superSamplerScope(false);
    GCSafeConcurrentJITLocker locker(exec->codeBlock()->m_lock, exec->vm().heap);

    if (tryCachePutByID(exec, baseValue, structure, propertyName, slot, stubInfo, putKind) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), appropriateGenericPutByIdFunction(slot, putKind));
}

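// Try to cache an "in" check. All this needs is an ObjectPropertyConditionSet proving the hit
// or the miss; the resulting InHit/InMiss AccessCase then answers structure-checked queries
// without re-running the lookup.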
static InlineCacheAction tryRepatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    if (forceICFailure(exec))
        return GiveUpOnCache;

    if (!base->structure()->propertyAccessesAreCacheable() || (!wasFound && !base->structure()->propertyAccessesAreCacheableForAbsence()))
        return GiveUpOnCache;

    if (wasFound) {
        if (!slot.isCacheable())
            return GiveUpOnCache;
    }

    CodeBlock* codeBlock = exec->codeBlock();
    VM& vm = exec->vm();
    Structure* structure = base->structure(vm);

    ObjectPropertyConditionSet conditionSet;
    if (wasFound) {
        if (slot.slotBase() != base) {
            conditionSet = generateConditionsForPrototypePropertyHit(
                vm, codeBlock, exec, structure, slot.slotBase(), ident.impl());
        }
    } else {
        conditionSet = generateConditionsForPropertyMiss(
            vm, codeBlock, exec, structure, ident.impl());
    }
    if (!conditionSet.isValid())
        return GiveUpOnCache;

    LOG_IC((ICEvent::InAddAccessCase, structure->classInfo(), ident));

    std::unique_ptr<AccessCase> newCase = AccessCase::in(
        vm, codeBlock, wasFound ? AccessCase::InHit : AccessCase::InMiss, structure, conditionSet);

    AccessGenerationResult result = stubInfo.addAccessCase(codeBlock, ident, WTFMove(newCase));

    if (result.generatedSomeCode()) {
        LOG_IC((ICEvent::InReplaceWithJump, structure->classInfo(), ident));

        RELEASE_ASSERT(result.code());

        MacroAssembler::repatchJump(
            stubInfo.patchableJumpForIn(),
            CodeLocationLabel(result.code()));
    }

    return result.shouldGiveUpNow() ? GiveUpOnCache : RetryCacheLater;
}

void repatchIn(
    ExecState* exec, JSCell* base, const Identifier& ident, bool wasFound,
    const PropertySlot& slot, StructureStubInfo& stubInfo)
{
    SuperSamplerScope superSamplerScope(false);
    if (tryRepatchIn(exec, base, ident, wasFound, slot, stubInfo) == GiveUpOnCache)
        ftlThunkAwareRepatchCall(exec->codeBlock(), stubInfo.slowPathCallLocation(), operationIn);
}

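// The linkSlowFor() overloads point a call site's slow path at a thunk: an explicitly provided
// code ref, a thunk produced by a generator, or the per-call-site virtual call thunk.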
static void linkSlowFor(VM*, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    MacroAssembler::repatchNearCall(callLinkInfo.callReturnLocation(), CodeLocationLabel(codeRef.code()));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo, ThunkGenerator generator)
{
    linkSlowFor(vm, callLinkInfo, vm->getCTIStub(generator));
}

static void linkSlowFor(VM* vm, CallLinkInfo& callLinkInfo)
{
    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    linkSlowFor(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

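// Link a call site to a specific callee once the slow path has resolved it. The hot path is
// repatched to jump straight to the callee's entrypoint; the slow path is pointed either at the
// polymorphic-call thunk (so the site can upgrade itself if it later sees other callees) or at
// the virtual call thunk when stubs are not allowed.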
void linkFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    JSFunction* callee, MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    callLinkInfo.setLastSeenCallee(exec->callerFrame()->vm(), callerCodeBlock, callee);
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec->callerFrame(), &callLinkInfo);

    if (callLinkInfo.specializationKind() == CodeForCall && callLinkInfo.allowStubs()) {
        linkSlowFor(vm, callLinkInfo, linkPolymorphicCallThunkGenerator);
        return;
    }

    linkSlowFor(vm, callLinkInfo);
}

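// Direct call sites know their callee CodeBlock statically, so linking just records the code
// block and repatches the jump/call; there is no callee check to install.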
void linkDirectFor(
    ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock,
    MacroAssemblerCodePtr codePtr)
{
    ASSERT(!callLinkInfo.stub());

    CodeBlock* callerCodeBlock = exec->codeBlock();

    VM* vm = callerCodeBlock->vm();

    ASSERT(!callLinkInfo.isLinked());
    callLinkInfo.setCodeBlock(*vm, callerCodeBlock, jsCast<FunctionCodeBlock*>(calleeCodeBlock));
    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking call in ", *callerCodeBlock, " at ", callLinkInfo.codeOrigin(), " to ", pointerDump(calleeCodeBlock), ", entrypoint at ", codePtr, "\n");
    if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
        MacroAssembler::repatchJumpToNop(callLinkInfo.patchableJump());
    MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), CodeLocationLabel(codePtr));

    if (calleeCodeBlock)
        calleeCodeBlock->linkIncomingCall(exec, &callLinkInfo);
}

void linkSlowFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    linkSlowFor(vm, callLinkInfo);
}

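// Return a call site to its unlinked state. Direct call sites are pointed back at their slow
// path start; indirect ones have the jump replacement undone and their slow path relinked to
// the given thunk.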
static void revertCall(VM* vm, CallLinkInfo& callLinkInfo, MacroAssemblerCodeRef codeRef)
{
    if (callLinkInfo.isDirect()) {
        callLinkInfo.clearCodeBlock();
        if (callLinkInfo.callType() == CallLinkInfo::DirectTailCall)
            MacroAssembler::repatchJump(callLinkInfo.patchableJump(), callLinkInfo.slowPathStart());
        else
            MacroAssembler::repatchNearCall(callLinkInfo.hotPathOther(), callLinkInfo.slowPathStart());
    } else {
        MacroAssembler::revertJumpReplacementToBranchPtrWithPatch(
            MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
            static_cast<MacroAssembler::RegisterID>(callLinkInfo.calleeGPR()), 0);
        linkSlowFor(vm, callLinkInfo, codeRef);
        callLinkInfo.clearCallee();
    }
    callLinkInfo.clearSeen();
    callLinkInfo.clearStub();
    callLinkInfo.clearSlowStub();
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

void unlinkFor(VM& vm, CallLinkInfo& callLinkInfo)
{
    if (Options::dumpDisassembly())
        dataLog("Unlinking call at ", callLinkInfo.hotPathOther(), "\n");

    revertCall(&vm, callLinkInfo, vm.getCTIStub(linkCallThunkGenerator));
}

void linkVirtualFor(
    ExecState* exec, CallLinkInfo& callLinkInfo)
{
    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    if (shouldDumpDisassemblyFor(callerCodeBlock))
        dataLog("Linking virtual call at ", *callerCodeBlock, " ", exec->callerFrame()->codeOrigin(), "\n");

    MacroAssemblerCodeRef virtualThunk = virtualThunkFor(vm, callLinkInfo);
    revertCall(vm, callLinkInfo, virtualThunk);
    callLinkInfo.setSlowStub(createJITStubRoutine(virtualThunk, *vm, nullptr, true));
}

namespace {
struct CallToCodePtr {
    CCallHelpers::Call call;
    MacroAssemblerCodePtr codePtr;
};
} // anonymous namespace

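// Build a polymorphic call stub: gather the callee variants seen so far, emit a BinarySwitch on
// either the callee cell or its executable (for closure calls), count fast-path hits for lower
// tiers via fastCounts, and fall back to the virtual call thunk whenever stub generation is not
// worthwhile (non-function callee, variant limit, arity mismatch, or allocation failure).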
void linkPolymorphicCall(
    ExecState* exec, CallLinkInfo& callLinkInfo, CallVariant newVariant)
{
    RELEASE_ASSERT(callLinkInfo.allowStubs());

    // Currently we can't do anything for non-function callees.
    // https://bugs.webkit.org/show_bug.cgi?id=140685
    if (!newVariant || !newVariant.executable()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    CodeBlock* callerCodeBlock = exec->callerFrame()->codeBlock();
    VM* vm = callerCodeBlock->vm();

    CallVariantList list;
    if (PolymorphicCallStubRoutine* stub = callLinkInfo.stub())
        list = stub->variants();
    else if (JSFunction* oldCallee = callLinkInfo.callee())
        list = CallVariantList{ CallVariant(oldCallee) };

    list = variantListWithVariant(list, newVariant);

    // If there are any closure calls then it makes sense to treat all of them as closure calls.
    // This makes switching on callee cheaper. It also produces profiling that's easier on the DFG;
    // the DFG doesn't really want to deal with a combination of closure and non-closure callees.
    bool isClosureCall = false;
    for (CallVariant variant : list) {
        if (variant.isClosureCall()) {
            list = despecifiedVariantList(list);
            isClosureCall = true;
            break;
        }
    }

    if (isClosureCall)
        callLinkInfo.setHasSeenClosure();

    Vector<PolymorphicCallCase> callCases;

    // Figure out what our cases are.
    for (CallVariant variant : list) {
        CodeBlock* codeBlock;
        if (variant.executable()->isHostFunction())
            codeBlock = nullptr;
        else {
            ExecutableBase* executable = variant.executable();
#if ENABLE(WEBASSEMBLY)
            if (executable->isWebAssemblyExecutable())
                codeBlock = jsCast<WebAssemblyExecutable*>(executable)->codeBlockForCall();
            else
#endif
                codeBlock = jsCast<FunctionExecutable*>(executable)->codeBlockForCall();
            // If we cannot handle a callee, either because we don't have a CodeBlock or because of an
            // arity mismatch, assume that it's better for this whole thing to be a virtual call.
            if (!codeBlock || exec->argumentCountIncludingThis() < static_cast<size_t>(codeBlock->numParameters()) || callLinkInfo.isVarargs()) {
                linkVirtualFor(exec, callLinkInfo);
                return;
            }
        }

        callCases.append(PolymorphicCallCase(variant, codeBlock));
    }

    // If we are over the limit, just use a normal virtual call.
    unsigned maxPolymorphicCallVariantListSize;
    if (callerCodeBlock->jitType() == JITCode::topTierJIT())
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSizeForTopTier();
    else
        maxPolymorphicCallVariantListSize = Options::maxPolymorphicCallVariantListSize();
    if (list.size() > maxPolymorphicCallVariantListSize) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    GPRReg calleeGPR = static_cast<GPRReg>(callLinkInfo.calleeGPR());

    CCallHelpers stubJit(vm, callerCodeBlock);

    CCallHelpers::JumpList slowPath;

    std::unique_ptr<CallFrameShuffler> frameShuffler;
    if (callLinkInfo.frameShuffleData()) {
        ASSERT(callLinkInfo.isTailCall());
        frameShuffler = std::make_unique<CallFrameShuffler>(stubJit, *callLinkInfo.frameShuffleData());
#if USE(JSVALUE32_64)
        // We would have already checked that the callee is a cell, and we can
        // use the additional register this buys us.
        frameShuffler->assumeCalleeIsCell();
#endif
        frameShuffler->lockGPR(calleeGPR);
    }
    GPRReg comparisonValueGPR;

    if (isClosureCall) {
        GPRReg scratchGPR;
        if (frameShuffler)
            scratchGPR = frameShuffler->acquireGPR();
        else
            scratchGPR = AssemblyHelpers::selectScratchGPR(calleeGPR);
        // Verify that we have a function and stash the executable in scratchGPR.

#if USE(JSVALUE64)
        slowPath.append(stubJit.branchTest64(CCallHelpers::NonZero, calleeGPR, GPRInfo::tagMaskRegister));
#else
        // We would have already checked that the callee is a cell.
#endif

        slowPath.append(
            stubJit.branch8(
                CCallHelpers::NotEqual,
                CCallHelpers::Address(calleeGPR, JSCell::typeInfoTypeOffset()),
                CCallHelpers::TrustedImm32(JSFunctionType)));

        stubJit.loadPtr(
            CCallHelpers::Address(calleeGPR, JSFunction::offsetOfExecutable()),
            scratchGPR);

        comparisonValueGPR = scratchGPR;
    } else
        comparisonValueGPR = calleeGPR;

    Vector<int64_t> caseValues(callCases.size());
    Vector<CallToCodePtr> calls(callCases.size());
    std::unique_ptr<uint32_t[]> fastCounts;

    if (callerCodeBlock->jitType() != JITCode::topTierJIT())
        fastCounts = std::make_unique<uint32_t[]>(callCases.size());

    for (size_t i = 0; i < callCases.size(); ++i) {
        if (fastCounts)
            fastCounts[i] = 0;

        CallVariant variant = callCases[i].variant();
        int64_t newCaseValue;
        if (isClosureCall)
            newCaseValue = bitwise_cast<intptr_t>(variant.executable());
        else
            newCaseValue = bitwise_cast<intptr_t>(variant.function());

        if (!ASSERT_DISABLED) {
            for (size_t j = 0; j < i; ++j) {
                if (caseValues[j] != newCaseValue)
                    continue;

                dataLog("ERROR: Attempt to add duplicate case value.\n");
                dataLog("Existing case values: ");
                CommaPrinter comma;
                for (size_t k = 0; k < i; ++k)
                    dataLog(comma, caseValues[k]);
                dataLog("\n");
                dataLog("Attempting to add: ", newCaseValue, "\n");
                dataLog("Variant list: ", listDump(callCases), "\n");
                RELEASE_ASSERT_NOT_REACHED();
            }
        }

        caseValues[i] = newCaseValue;
    }

    GPRReg fastCountsBaseGPR;
    if (frameShuffler)
        fastCountsBaseGPR = frameShuffler->acquireGPR();
    else {
        fastCountsBaseGPR =
            AssemblyHelpers::selectScratchGPR(calleeGPR, comparisonValueGPR, GPRInfo::regT3);
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(fastCounts.get()), fastCountsBaseGPR);
    if (!frameShuffler && callLinkInfo.isTailCall())
        stubJit.emitRestoreCalleeSaves();
    BinarySwitch binarySwitch(comparisonValueGPR, caseValues, BinarySwitch::IntPtr);
    CCallHelpers::JumpList done;
    while (binarySwitch.advance(stubJit)) {
        size_t caseIndex = binarySwitch.caseIndex();

        CallVariant variant = callCases[caseIndex].variant();

        ASSERT(variant.executable()->hasJITCodeForCall());
        MacroAssemblerCodePtr codePtr =
            variant.executable()->generatedJITCodeForCall()->addressForCall(ArityCheckNotRequired);

        if (fastCounts) {
            stubJit.add32(
                CCallHelpers::TrustedImm32(1),
                CCallHelpers::Address(fastCountsBaseGPR, caseIndex * sizeof(uint32_t)));
        }
        if (frameShuffler) {
            CallFrameShuffler(stubJit, frameShuffler->snapshot()).prepareForTailCall();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else if (callLinkInfo.isTailCall()) {
            stubJit.prepareForTailCallSlow();
            calls[caseIndex].call = stubJit.nearTailCall();
        } else
            calls[caseIndex].call = stubJit.nearCall();
        calls[caseIndex].codePtr = codePtr;
        done.append(stubJit.jump());
    }

    slowPath.link(&stubJit);
    binarySwitch.fallThrough().link(&stubJit);

    if (frameShuffler) {
        frameShuffler->releaseGPR(calleeGPR);
        frameShuffler->releaseGPR(comparisonValueGPR);
        frameShuffler->releaseGPR(fastCountsBaseGPR);
#if USE(JSVALUE32_64)
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT1, GPRInfo::regT0));
#else
        frameShuffler->setCalleeJSValueRegs(JSValueRegs(GPRInfo::regT0));
#endif
        frameShuffler->prepareForSlowPath();
    } else {
        stubJit.move(calleeGPR, GPRInfo::regT0);
#if USE(JSVALUE32_64)
        stubJit.move(CCallHelpers::TrustedImm32(JSValue::CellTag), GPRInfo::regT1);
#endif
    }
    stubJit.move(CCallHelpers::TrustedImmPtr(&callLinkInfo), GPRInfo::regT2);
    stubJit.move(CCallHelpers::TrustedImmPtr(callLinkInfo.callReturnLocation().executableAddress()), GPRInfo::regT4);

    stubJit.restoreReturnAddressBeforeReturn(GPRInfo::regT4);
    AssemblyHelpers::Jump slow = stubJit.jump();

    LinkBuffer patchBuffer(*vm, stubJit, callerCodeBlock, JITCompilationCanFail);
    if (patchBuffer.didFailToAllocate()) {
        linkVirtualFor(exec, callLinkInfo);
        return;
    }

    RELEASE_ASSERT(callCases.size() == calls.size());
    for (CallToCodePtr callToCodePtr : calls) {
        // Tail call special-casing ensures proper linking on ARM Thumb2, where a tail call jumps to an address
        // with a non-decorated bottom bit but a normal call calls an address with a decorated bottom bit.
        bool isTailCall = callToCodePtr.call.isFlagSet(CCallHelpers::Call::Tail);
        patchBuffer.link(
            callToCodePtr.call, FunctionPtr(isTailCall ? callToCodePtr.codePtr.dataLocation() : callToCodePtr.codePtr.executableAddress()));
    }
    if (JITCode::isOptimizingJIT(callerCodeBlock->jitType()))
        patchBuffer.link(done, callLinkInfo.callReturnLocation().labelAtOffset(0));
    else
        patchBuffer.link(done, callLinkInfo.hotPathOther().labelAtOffset(0));
    patchBuffer.link(slow, CodeLocationLabel(vm->getCTIStub(linkPolymorphicCallThunkGenerator).code()));

    auto stubRoutine = adoptRef(*new PolymorphicCallStubRoutine(
        FINALIZE_CODE_FOR(
            callerCodeBlock, patchBuffer,
            ("Polymorphic call stub for %s, return point %p, targets %s",
                toCString(*callerCodeBlock).data(), callLinkInfo.callReturnLocation().labelAtOffset(0).executableAddress(),
                toCString(listDump(callCases)).data())),
        *vm, callerCodeBlock, exec->callerFrame(), callLinkInfo, callCases,
        WTFMove(fastCounts)));

    MacroAssembler::replaceWithJump(
        MacroAssembler::startOfBranchPtrWithPatchOnRegister(callLinkInfo.hotPathBegin()),
        CodeLocationLabel(stubRoutine->code().code()));
    // The original slow path is unreachable on 64-bits, but still
    // reachable on 32-bits since a non-cell callee will always
    // trigger the slow path.
    linkSlowFor(vm, callLinkInfo);

    // If there had been a previous stub routine, that one will die as soon as the GC runs and sees
    // that it's no longer on stack.
    callLinkInfo.setStub(WTFMove(stubRoutine));

    // The call link info no longer has a call cache apart from the jump to the polymorphic call
    // stub.
    if (callLinkInfo.isOnList())
        callLinkInfo.remove();
}

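// The reset functions below undo IC patching when a stub is discarded: they point the slow-path
// call back at the optimizing operation (where one exists) and rewire the inline jump back to
// the slow path start.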
void resetGetByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo, GetByIDKind kind)
{
    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), appropriateOptimizingGetByIdFunction(kind));
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetPutByID(CodeBlock* codeBlock, StructureStubInfo& stubInfo)
{
    V_JITOperation_ESsiJJI unoptimizedFunction = bitwise_cast<V_JITOperation_ESsiJJI>(readCallTarget(codeBlock, stubInfo.slowPathCallLocation()).executableAddress());
    V_JITOperation_ESsiJJI optimizedFunction;
    if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictOptimize)
        optimizedFunction = operationPutByIdStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictOptimize)
        optimizedFunction = operationPutByIdNonStrictOptimize;
    else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictOptimize)
        optimizedFunction = operationPutByIdDirectStrictOptimize;
    else {
        ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictOptimize);
        optimizedFunction = operationPutByIdDirectNonStrictOptimize;
    }

    ftlThunkAwareRepatchCall(codeBlock, stubInfo.slowPathCallLocation(), optimizedFunction);
    InlineAccess::rewireStubAsJump(*codeBlock->vm(), stubInfo, stubInfo.slowPathStartLocation());
}

void resetIn(CodeBlock*, StructureStubInfo& stubInfo)
{
    MacroAssembler::repatchJump(stubInfo.patchableJumpForIn(), stubInfo.slowPathStartLocation());
}

} // namespace JSC

#endif