Merge the jsCStack branch
https://bugs.webkit.org/show_bug.cgi?id=127763

Reviewed by Mark Hahnenberg.

Source/JavaScriptCore: 

Changes from http://svn.webkit.org/repository/webkit/branches/jsCStack
up to changeset 162958.

Source/WebCore: 

Changes from http://svn.webkit.org/repository/webkit/branches/jsCStack
up to changeset 162958.

Source/WTF: 

Changes from http://svn.webkit.org/repository/webkit/branches/jsCStack
up to changeset 162958.


git-svn-id: http://svn.webkit.org/repository/webkit/trunk@163027 268f45cc-cd09-0410-ab3c-d52691b4dbfc
diff --git a/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp b/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp
index a4baa61..6a60057 100644
--- a/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp
+++ b/Source/JavaScriptCore/bytecode/CallLinkInfo.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -37,15 +37,15 @@
 {
     ASSERT(isLinked());
     
+    if (Options::showDisassembly())
+        dataLog("Unlinking call from ", callReturnLocation, " to ", pointerDump(repatchBuffer.codeBlock()), "\n");
+
     repatchBuffer.revertJumpReplacementToBranchPtrWithPatch(RepatchBuffer::startOfBranchPtrWithPatchOnRegister(hotPathBegin), static_cast<MacroAssembler::RegisterID>(calleeGPR), 0);
-    if (isDFG) {
-#if ENABLE(DFG_JIT)
-        repatchBuffer.relink(callReturnLocation, (callType == Construct ? vm.getCTIStub(linkConstructThunkGenerator) : vm.getCTIStub(linkCallThunkGenerator)).code());
-#else
-        RELEASE_ASSERT_NOT_REACHED();
-#endif
-    } else
-        repatchBuffer.relink(callReturnLocation, callType == Construct ? vm.getCTIStub(linkConstructThunkGenerator).code() : vm.getCTIStub(linkCallThunkGenerator).code());
+    repatchBuffer.relink(
+        callReturnLocation,
+        vm.getCTIStub(linkThunkGeneratorFor(
+            callType == Construct ? CodeForConstruct : CodeForCall,
+            isFTL ? MustPreserveRegisters : RegisterPreservationNotRequired)).code());
     hasSeenShouldRepatch = false;
     callee.clear();
     stub.clear();
diff --git a/Source/JavaScriptCore/bytecode/CallLinkInfo.h b/Source/JavaScriptCore/bytecode/CallLinkInfo.h
index 0244497..d434e73 100644
--- a/Source/JavaScriptCore/bytecode/CallLinkInfo.h
+++ b/Source/JavaScriptCore/bytecode/CallLinkInfo.h
@@ -55,8 +55,8 @@
     }
         
     CallLinkInfo()
-        : hasSeenShouldRepatch(false)
-        , isDFG(false)
+        : isFTL(false)
+        , hasSeenShouldRepatch(false)
         , hasSeenClosure(false)
         , callType(None)
     {
@@ -79,8 +79,8 @@
     JITWriteBarrier<JSFunction> callee;
     WriteBarrier<JSFunction> lastSeenCallee;
     RefPtr<ClosureCallStubRoutine> stub;
+    bool isFTL : 1;
     bool hasSeenShouldRepatch : 1;
-    bool isDFG : 1;
     bool hasSeenClosure : 1;
     unsigned callType : 5; // CallType
     unsigned calleeGPR : 8;
diff --git a/Source/JavaScriptCore/bytecode/CallLinkStatus.cpp b/Source/JavaScriptCore/bytecode/CallLinkStatus.cpp
index b64c967..d46a68f 100644
--- a/Source/JavaScriptCore/bytecode/CallLinkStatus.cpp
+++ b/Source/JavaScriptCore/bytecode/CallLinkStatus.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,13 +26,17 @@
 #include "config.h"
 #include "CallLinkStatus.h"
 
+#include "CallLinkInfo.h"
 #include "CodeBlock.h"
+#include "DFGJITCode.h"
 #include "LLIntCallLinkInfo.h"
 #include "Operations.h"
 #include <wtf/CommaPrinter.h>
 
 namespace JSC {
 
+static const bool verbose = false;
+
 CallLinkStatus::CallLinkStatus(JSValue value)
     : m_callTarget(value)
     , m_executable(0)
@@ -81,12 +85,28 @@
     return m_executable->intrinsicFor(kind);
 }
 
-CallLinkStatus CallLinkStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex)
+CallLinkStatus CallLinkStatus::computeFromLLInt(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex)
 {
     UNUSED_PARAM(profiledBlock);
     UNUSED_PARAM(bytecodeIndex);
 #if ENABLE(LLINT)
+#if ENABLE(DFG_JIT)
+    if (profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadFunction))) {
+        // We could force this to be a closure call, but instead we'll just assume that it
+        // takes slow path.
+        return takesSlowPath();
+    }
+#else
+    UNUSED_PARAM(locker);
+#endif
+
+    VM& vm = *profiledBlock->vm();
+    
     Instruction* instruction = profiledBlock->instructions().begin() + bytecodeIndex;
+    OpcodeID op = vm.interpreter->getOpcodeID(instruction[0].u.opcode);
+    if (op != op_call && op != op_construct)
+        return CallLinkStatus();
+    
     LLIntCallLinkInfo* callLinkInfo = instruction[5].u.callLinkInfo;
     
     return CallLinkStatus(callLinkInfo->lastSeenCallee.get());
@@ -102,27 +122,121 @@
     UNUSED_PARAM(profiledBlock);
     UNUSED_PARAM(bytecodeIndex);
 #if ENABLE(JIT)
+    if (profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadExecutable)))
+        return takesSlowPath();
+    
     if (!profiledBlock->hasBaselineJITProfiling())
-        return computeFromLLInt(profiledBlock, bytecodeIndex);
+        return computeFromLLInt(locker, profiledBlock, bytecodeIndex);
     
     if (profiledBlock->couldTakeSlowCase(bytecodeIndex))
-        return CallLinkStatus::takesSlowPath();
+        return takesSlowPath();
     
     CallLinkInfo& callLinkInfo = profiledBlock->getCallLinkInfo(bytecodeIndex);
+    
+    CallLinkStatus result = computeFor(locker, callLinkInfo);
+    if (!result)
+        return computeFromLLInt(locker, profiledBlock, bytecodeIndex);
+    
+    if (profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadFunction)))
+        result.makeClosureCall();
+    
+    return result;
+#else
+    return CallLinkStatus();
+#endif
+}
+
+#if ENABLE(JIT)
+CallLinkStatus CallLinkStatus::computeFor(const ConcurrentJITLocker&, CallLinkInfo& callLinkInfo)
+{
     if (callLinkInfo.stub)
         return CallLinkStatus(callLinkInfo.stub->executable(), callLinkInfo.stub->structure());
     
     JSFunction* target = callLinkInfo.lastSeenCallee.get();
     if (!target)
-        return computeFromLLInt(profiledBlock, bytecodeIndex);
+        return CallLinkStatus();
     
     if (callLinkInfo.hasSeenClosure)
         return CallLinkStatus(target->executable(), target->structure());
 
     return CallLinkStatus(target);
-#else
-    return CallLinkStatus();
+}
 #endif
+
+void CallLinkStatus::computeDFGStatuses(
+    CodeBlock* dfgCodeBlock, CallLinkStatus::ContextMap& map)
+{
+#if ENABLE(DFG_JIT)
+    RELEASE_ASSERT(dfgCodeBlock->jitType() == JITCode::DFGJIT);
+    CodeBlock* baselineCodeBlock = dfgCodeBlock->alternative();
+    DFG::JITCode* jitCode = dfgCodeBlock->jitCode()->dfg();
+    RELEASE_ASSERT(dfgCodeBlock->numberOfCallLinkInfos() <= jitCode->slowPathCalls.size());
+    
+    for (size_t i = dfgCodeBlock->numberOfCallLinkInfos(); i--;) {
+        CallLinkInfo& info = dfgCodeBlock->callLinkInfo(i);
+        CodeOrigin codeOrigin = info.codeOrigin;
+        
+        bool takeSlowPath;
+        bool badFunction;
+        
+        // Check if we have already made a terrible mistake in the FTL for this
+        // code origin. Note that this is approximate because we could have a monovariant
+        // inline in the FTL that ended up failing. We should fix that at some point by
+        // having data structures to track the context of frequent exits. This is currently
+        // challenging because it would require creating a CodeOrigin-based database in
+        // baseline CodeBlocks, but those CodeBlocks don't really have a place to put the
+        // InlineCallFrames.
+        CodeBlock* currentBaseline =
+            baselineCodeBlockForOriginAndBaselineCodeBlock(codeOrigin, baselineCodeBlock);
+        {
+            ConcurrentJITLocker locker(currentBaseline->m_lock);
+            takeSlowPath =
+                currentBaseline->hasExitSite(locker, DFG::FrequentExitSite(codeOrigin.bytecodeIndex, BadCache, ExitFromFTL))
+                || currentBaseline->hasExitSite(locker, DFG::FrequentExitSite(codeOrigin.bytecodeIndex, BadCacheWatchpoint, ExitFromFTL))
+                || currentBaseline->hasExitSite(locker, DFG::FrequentExitSite(codeOrigin.bytecodeIndex, BadExecutable, ExitFromFTL));
+            badFunction =
+                currentBaseline->hasExitSite(locker, DFG::FrequentExitSite(codeOrigin.bytecodeIndex, BadFunction, ExitFromFTL));
+        }
+        
+        {
+            ConcurrentJITLocker locker(dfgCodeBlock->m_lock);
+            if (takeSlowPath || jitCode->slowPathCalls[i] >= Options::couldTakeSlowCaseMinimumCount())
+                map.add(info.codeOrigin, takesSlowPath());
+            else {
+                CallLinkStatus status = computeFor(locker, info);
+                if (status.isSet()) {
+                    if (badFunction)
+                        status.makeClosureCall();
+                    map.add(info.codeOrigin, status);
+                }
+            }
+        }
+    }
+#else
+    UNUSED_PARAM(dfgCodeBlock);
+#endif // ENABLE(DFG_JIT)
+    
+    if (verbose) {
+        dataLog("Context map:\n");
+        ContextMap::iterator iter = map.begin();
+        ContextMap::iterator end = map.end();
+        for (; iter != end; ++iter) {
+            dataLog("    ", iter->key, ":\n");
+            dataLog("        ", iter->value, "\n");
+        }
+    }
+}
+
+CallLinkStatus CallLinkStatus::computeFor(
+    CodeBlock* profiledBlock, CodeOrigin codeOrigin, const CallLinkStatus::ContextMap& map)
+{
+    ContextMap::const_iterator iter = map.find(codeOrigin);
+    if (iter != map.end())
+        return iter->value;
+    
+    return computeFor(profiledBlock, codeOrigin.bytecodeIndex);
 }
 
 void CallLinkStatus::dump(PrintStream& out) const
diff --git a/Source/JavaScriptCore/bytecode/CallLinkStatus.h b/Source/JavaScriptCore/bytecode/CallLinkStatus.h
index 51965fe..4bdc985 100644
--- a/Source/JavaScriptCore/bytecode/CallLinkStatus.h
+++ b/Source/JavaScriptCore/bytecode/CallLinkStatus.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,7 +26,9 @@
 #ifndef CallLinkStatus_h
 #define CallLinkStatus_h
 
+#include "CodeOrigin.h"
 #include "CodeSpecializationKind.h"
+#include "ConcurrentJITLock.h"
 #include "Intrinsic.h"
 #include "JSCJSValue.h"
 
@@ -37,6 +39,7 @@
 class InternalFunction;
 class JSFunction;
 class Structure;
+struct CallLinkInfo;
 
 class CallLinkStatus {
 public:
@@ -73,32 +76,22 @@
     }
     
     static CallLinkStatus computeFor(CodeBlock*, unsigned bytecodeIndex);
+
+#if ENABLE(JIT)
+    // Computes the status assuming that we never took slow path and never previously
+    // exited.
+    static CallLinkStatus computeFor(const ConcurrentJITLocker&, CallLinkInfo&);
+#endif
     
-    CallLinkStatus& setHasBadFunctionExitSite(bool didHaveExitSite)
-    {
-        ASSERT(!m_isProved);
-        if (didHaveExitSite) {
-            // Turn this into a closure call.
-            m_callTarget = JSValue();
-        }
-        return *this;
-    }
+    typedef HashMap<CodeOrigin, CallLinkStatus, CodeOriginApproximateHash> ContextMap;
     
-    CallLinkStatus& setHasBadCacheExitSite(bool didHaveExitSite)
-    {
-        ASSERT(!m_isProved);
-        if (didHaveExitSite)
-            *this = takesSlowPath();
-        return *this;
-    }
+    // Computes all of the statuses of the DFG code block. Doesn't include statuses that had
+    // no information. Currently we use this when compiling FTL code, to enable polyvariant
+    // inlining.
+    static void computeDFGStatuses(CodeBlock* dfgCodeBlock, ContextMap&);
     
-    CallLinkStatus& setHasBadExecutableExitSite(bool didHaveExitSite)
-    {
-        ASSERT(!m_isProved);
-        if (didHaveExitSite)
-            *this = takesSlowPath();
-        return *this;
-    }
+    // Helper that first consults the ContextMap and then does computeFor().
+    static CallLinkStatus computeFor(CodeBlock*, CodeOrigin, const ContextMap&);
     
     bool isSet() const { return m_callTarget || m_executable || m_couldTakeSlowPath; }
     
@@ -119,7 +112,14 @@
     void dump(PrintStream&) const;
     
 private:
-    static CallLinkStatus computeFromLLInt(CodeBlock*, unsigned bytecodeIndex);
+    void makeClosureCall()
+    {
+        ASSERT(!m_isProved);
+        // Turn this into a closure call.
+        m_callTarget = JSValue();
+    }
+    
+    static CallLinkStatus computeFromLLInt(const ConcurrentJITLocker&, CodeBlock*, unsigned bytecodeIndex);
     
     JSValue m_callTarget;
     ExecutableBase* m_executable;
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.cpp b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
index 0ad23c4..263ec7c 100644
--- a/Source/JavaScriptCore/bytecode/CodeBlock.cpp
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.cpp
@@ -129,10 +129,15 @@
 
 void CodeBlock::dumpAssumingJITType(PrintStream& out, JITCode::JITType jitType) const
 {
+    out.print(inferredName(), "#");
     if (hasHash() || isSafeToComputeHash())
-        out.print(inferredName(), "#", hash(), ":[", RawPointer(this), "->", RawPointer(ownerExecutable()), ", ", jitType, codeType());
+        out.print(hash());
     else
-        out.print(inferredName(), "#<no-hash>:[", RawPointer(this), "->", RawPointer(ownerExecutable()), ", ", jitType, codeType());
+        out.print("<no-hash>");
+    out.print(":[", RawPointer(this), "->");
+    if (!!m_alternative)
+        out.print(RawPointer(m_alternative.get()), "->");
+    out.print(RawPointer(ownerExecutable()), ", ", jitType, codeType());
 
     if (codeType() == FunctionCode)
         out.print(specializationKind());
@@ -143,6 +148,10 @@
         out.print(" (NeverInline)");
     if (ownerExecutable()->isStrictMode())
         out.print(" (StrictMode)");
+    if (this->jitType() == JITCode::BaselineJIT && m_didFailFTLCompilation)
+        out.print(" (FTLFail)");
+    if (this->jitType() == JITCode::BaselineJIT && m_hasBeenCompiledWithFTL)
+        out.print(" (HadFTLReplacement)");
     out.print("]");
 }
 
@@ -475,8 +484,8 @@
             if (target)
                 out.printf(" jit(%p, exec %p)", target, target->executable());
         }
-#endif
         out.print(" status(", CallLinkStatus::computeFor(this, location), ")");
+#endif
     }
     ++it;
     dumpArrayProfiling(out, it, hasPrintedProfiling);
@@ -526,12 +535,7 @@
     out.printf("\n");
     
     StubInfoMap stubInfos;
-#if ENABLE(JIT)
-    {
-        ConcurrentJITLocker locker(m_lock);
-        getStubInfoMap(locker, stubInfos);
-    }
-#endif
+    getStubInfoMap(stubInfos);
     
     const Instruction* begin = instructions().begin();
     const Instruction* end = instructions().end();
@@ -1469,6 +1473,7 @@
     , m_isConstructor(other.m_isConstructor)
     , m_shouldAlwaysBeInlined(true)
     , m_didFailFTLCompilation(false)
+    , m_hasBeenCompiledWithFTL(false)
     , m_unlinkedCode(*other.m_vm, other.m_ownerExecutable.get(), other.m_unlinkedCode.get())
     , m_steppingMode(SteppingModeDisabled)
     , m_numBreakpoints(0)
@@ -1525,6 +1530,7 @@
     , m_isConstructor(unlinkedCodeBlock->isConstructor())
     , m_shouldAlwaysBeInlined(true)
     , m_didFailFTLCompilation(false)
+    , m_hasBeenCompiledWithFTL(false)
     , m_unlinkedCode(m_globalObject->vm(), ownerExecutable, unlinkedCodeBlock)
     , m_steppingMode(SteppingModeDisabled)
     , m_numBreakpoints(0)
@@ -1854,23 +1860,13 @@
 
     // If the concurrent thread will want the code block's hash, then compute it here
     // synchronously.
-    if (Options::showDisassembly()
-        || Options::showDFGDisassembly()
-        || Options::dumpBytecodeAtDFGTime()
-        || Options::dumpGraphAtEachPhase()
-        || Options::verboseCompilation()
-        || Options::logCompilationChanges()
-        || Options::validateGraph()
-        || Options::validateGraphAtEachPhase()
-        || Options::verboseOSR()
-        || Options::verboseCompilationQueue()
-        || Options::reportCompileTimes()
-        || Options::verboseCFA())
+    if (Options::alwaysComputeHash())
         hash();
 
     if (Options::dumpGeneratedBytecodes())
         dumpBytecode();
 
+    
     m_heap->m_codeBlocks.add(this);
     m_heap->reportExtraMemoryCost(sizeof(CodeBlock) + m_instructions.size() * sizeof(Instruction));
 }
@@ -2334,6 +2330,21 @@
 #endif
 }
 
+void CodeBlock::getStubInfoMap(const ConcurrentJITLocker&, StubInfoMap& result)
+{
+#if ENABLE(JIT)
+    toHashMap(m_stubInfos, getStructureStubInfoCodeOrigin, result);
+#else
+    UNUSED_PARAM(result);
+#endif
+}
+
+void CodeBlock::getStubInfoMap(StubInfoMap& result)
+{
+    ConcurrentJITLocker locker(m_lock);
+    getStubInfoMap(locker, result);
+}
+
 #if ENABLE(JIT)
 StructureStubInfo* CodeBlock::addStubInfo()
 {
@@ -2341,11 +2352,6 @@
     return m_stubInfos.add();
 }
 
-void CodeBlock::getStubInfoMap(const ConcurrentJITLocker&, StubInfoMap& result)
-{
-    toHashMap(m_stubInfos, getStructureStubInfoCodeOrigin, result);
-}
-
 void CodeBlock::resetStub(StructureStubInfo& stubInfo)
 {
     if (stubInfo.accessType == access_unset)
@@ -2899,6 +2905,11 @@
 unsigned CodeBlock::numberOfDFGCompiles()
 {
     ASSERT(JITCode::isBaselineCode(jitType()));
+    if (Options::testTheFTL()) {
+        if (m_didFailFTLCompilation)
+            return 1000000;
+        return (m_hasBeenCompiledWithFTL ? 1 : 0) + m_reoptimizationRetryCounter;
+    }
     return (JITCode::isOptimizingJIT(replacement()->jitType()) ? 1 : 0) + m_reoptimizationRetryCounter;
 }
 
@@ -3010,7 +3021,7 @@
 bool CodeBlock::checkIfOptimizationThresholdReached()
 {
 #if ENABLE(DFG_JIT)
-    if (DFG::Worklist* worklist = m_vm->worklist.get()) {
+    if (DFG::Worklist* worklist = DFG::existingGlobalDFGWorklistOrNull()) {
         if (worklist->compilationState(DFG::CompilationKey(this, DFG::DFGMode))
             == DFG::Worklist::Compiled) {
             optimizeNextInvocation();
@@ -3341,6 +3352,11 @@
     }
 }
 
+int CodeBlock::stackPointerOffset()
+{
+    return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
+}
+
 size_t CodeBlock::predictedMachineCodeSize()
 {
     // This will be called from CodeBlock::CodeBlock before either m_vm or the
diff --git a/Source/JavaScriptCore/bytecode/CodeBlock.h b/Source/JavaScriptCore/bytecode/CodeBlock.h
index 0d98680..68bc037 100644
--- a/Source/JavaScriptCore/bytecode/CodeBlock.h
+++ b/Source/JavaScriptCore/bytecode/CodeBlock.h
@@ -175,6 +175,9 @@
     void expressionRangeForBytecodeOffset(unsigned bytecodeOffset, int& divot,
                                           int& startOffset, int& endOffset, unsigned& line, unsigned& column);
 
+    void getStubInfoMap(const ConcurrentJITLocker&, StubInfoMap& result);
+    void getStubInfoMap(StubInfoMap& result);
+
 #if ENABLE(JIT)
     StructureStubInfo* addStubInfo();
     Bag<StructureStubInfo>::iterator begin() { return m_stubInfos.begin(); }
@@ -182,8 +185,6 @@
 
     void resetStub(StructureStubInfo&);
     
-    void getStubInfoMap(const ConcurrentJITLocker&, StubInfoMap& result);
-
     ByValInfo& getByValInfo(unsigned bytecodeIndex)
     {
         return *(binarySearch<ByValInfo, unsigned>(m_byValInfos, m_byValInfos.size(), bytecodeIndex, getByValInfoBytecodeIndex));
@@ -256,17 +257,15 @@
     // Exactly equivalent to codeBlock->ownerExecutable()->newReplacementCodeBlockFor(codeBlock->specializationKind())
     PassRefPtr<CodeBlock> newReplacement();
     
-    void setJITCode(PassRefPtr<JITCode> code, MacroAssemblerCodePtr codeWithArityCheck)
+    void setJITCode(PassRefPtr<JITCode> code)
     {
         ASSERT(m_heap->isDeferred());
         m_heap->reportExtraMemoryCost(code->size());
         ConcurrentJITLocker locker(m_lock);
         WTF::storeStoreFence(); // This is probably not needed because the lock will also do something similar, but it's good to be paranoid.
         m_jitCode = code;
-        m_jitCodeWithArityCheck = codeWithArityCheck;
     }
     PassRefPtr<JITCode> jitCode() { return m_jitCode; }
-    MacroAssemblerCodePtr jitCodeWithArityCheck() { return m_jitCodeWithArityCheck; }
     JITCode::JITType jitType() const
     {
         JITCode* jitCode = m_jitCode.get();
@@ -489,8 +488,8 @@
     RareCaseProfile* specialFastCaseProfileForBytecodeOffset(int bytecodeOffset)
     {
         return tryBinarySearch<RareCaseProfile, int>(
-                                                     m_specialFastCaseProfiles, m_specialFastCaseProfiles.size(), bytecodeOffset,
-                                                     getRareCaseProfileBytecodeOffset);
+            m_specialFastCaseProfiles, m_specialFastCaseProfiles.size(), bytecodeOffset,
+            getRareCaseProfileBytecodeOffset);
     }
 
     bool likelyToTakeSpecialFastCase(int bytecodeOffset)
@@ -576,11 +575,15 @@
         ConcurrentJITLocker locker(m_lock);
         return m_exitProfile.add(locker, site);
     }
-        
+
+    bool hasExitSite(const ConcurrentJITLocker& locker, const DFG::FrequentExitSite& site) const
+    {
+        return m_exitProfile.hasExitSite(locker, site);
+    }
     bool hasExitSite(const DFG::FrequentExitSite& site) const
     {
         ConcurrentJITLocker locker(m_lock);
-        return m_exitProfile.hasExitSite(locker, site);
+        return hasExitSite(locker, site);
     }
 
     DFG::ExitProfile& exitProfile() { return m_exitProfile; }
@@ -589,11 +592,6 @@
     {
         return m_lazyOperandValueProfiles;
     }
-#else // ENABLE(DFG_JIT)
-    bool addFrequentExitSite(const DFG::FrequentExitSite&)
-    {
-        return false;
-    }
 #endif // ENABLE(DFG_JIT)
 
     // Constant Pool
@@ -679,9 +677,19 @@
 
     BytecodeLivenessAnalysis& livenessAnalysis()
     {
-        if (!m_livenessAnalysis)
-            m_livenessAnalysis = std::make_unique<BytecodeLivenessAnalysis>(this);
-        return *m_livenessAnalysis;
+        {
+            ConcurrentJITLocker locker(m_lock);
+            if (!!m_livenessAnalysis)
+                return *m_livenessAnalysis;
+        }
+        std::unique_ptr<BytecodeLivenessAnalysis> analysis =
+            std::make_unique<BytecodeLivenessAnalysis>(this);
+        {
+            ConcurrentJITLocker locker(m_lock);
+            if (!m_livenessAnalysis)
+                m_livenessAnalysis = std::move(analysis);
+            return *m_livenessAnalysis;
+        }
     }
     
     void validate();
@@ -866,6 +874,7 @@
     void updateAllPredictions();
 
     unsigned frameRegisterCount();
+    int stackPointerOffset();
 
     bool hasOpDebugForLineAndColumn(unsigned line, unsigned column);
 
@@ -891,7 +900,7 @@
 
     int m_numCalleeRegisters;
     int m_numVars;
-    bool m_isConstructor;
+    bool m_isConstructor : 1;
     
     // This is intentionally public; it's the responsibility of anyone doing any
     // of the following to hold the lock:
@@ -911,10 +920,11 @@
     // concurrent compilation threads finish what they're doing.
     mutable ConcurrentJITLock m_lock;
     
-    bool m_shouldAlwaysBeInlined;
-    bool m_allTransitionsHaveBeenMarked; // Initialized and used on every GC.
+    bool m_shouldAlwaysBeInlined; // Not a bitfield because the JIT wants to store to it.
+    bool m_allTransitionsHaveBeenMarked : 1; // Initialized and used on every GC.
     
-    bool m_didFailFTLCompilation;
+    bool m_didFailFTLCompilation : 1;
+    bool m_hasBeenCompiledWithFTL : 1;
 
     // Internal methods for use by validation code. It would be private if it wasn't
     // for the fact that we use it from anonymous namespaces.
@@ -1054,7 +1064,6 @@
     SentinelLinkedList<LLIntCallLinkInfo, BasicRawSentinelNode<LLIntCallLinkInfo>> m_incomingLLIntCalls;
 #endif
     RefPtr<JITCode> m_jitCode;
-    MacroAssemblerCodePtr m_jitCodeWithArityCheck;
 #if ENABLE(JIT)
     Bag<StructureStubInfo> m_stubInfos;
     Vector<ByValInfo> m_byValInfos;
diff --git a/Source/JavaScriptCore/bytecode/CodeOrigin.cpp b/Source/JavaScriptCore/bytecode/CodeOrigin.cpp
index 39b83fe..68dc6b0 100644
--- a/Source/JavaScriptCore/bytecode/CodeOrigin.cpp
+++ b/Source/JavaScriptCore/bytecode/CodeOrigin.cpp
@@ -45,7 +45,64 @@
 {
     return inlineDepthForCallFrame(inlineCallFrame);
 }
+
+bool CodeOrigin::isApproximatelyEqualTo(const CodeOrigin& other) const
+{
+    CodeOrigin a = *this;
+    CodeOrigin b = other;
+
+    if (!a.isSet())
+        return !b.isSet();
+    if (!b.isSet())
+        return false;
     
+    if (a.isHashTableDeletedValue())
+        return b.isHashTableDeletedValue();
+    if (b.isHashTableDeletedValue())
+        return false;
+    
+    for (;;) {
+        ASSERT(a.isSet());
+        ASSERT(b.isSet());
+        
+        if (a.bytecodeIndex != b.bytecodeIndex)
+            return false;
+        
+        if ((!!a.inlineCallFrame) != (!!b.inlineCallFrame))
+            return false;
+        
+        if (!a.inlineCallFrame)
+            return true;
+        
+        if (a.inlineCallFrame->executable != b.inlineCallFrame->executable)
+            return false;
+        
+        a = a.inlineCallFrame->caller;
+        b = b.inlineCallFrame->caller;
+    }
+}
+
+unsigned CodeOrigin::approximateHash() const
+{
+    if (!isSet())
+        return 0;
+    if (isHashTableDeletedValue())
+        return 1;
+    
+    unsigned result = 2;
+    CodeOrigin codeOrigin = *this;
+    for (;;) {
+        result += codeOrigin.bytecodeIndex;
+        
+        if (!codeOrigin.inlineCallFrame)
+            return result;
+        
+        result += WTF::PtrHash<JSCell*>::hash(codeOrigin.inlineCallFrame->executable.get());
+        
+        codeOrigin = codeOrigin.inlineCallFrame->caller;
+    }
+}
+
 Vector<CodeOrigin> CodeOrigin::inlineStack() const
 {
     Vector<CodeOrigin> result(inlineDepth());
diff --git a/Source/JavaScriptCore/bytecode/CodeOrigin.h b/Source/JavaScriptCore/bytecode/CodeOrigin.h
index ed660c2..c525179 100644
--- a/Source/JavaScriptCore/bytecode/CodeOrigin.h
+++ b/Source/JavaScriptCore/bytecode/CodeOrigin.h
@@ -63,7 +63,7 @@
     
     CodeOrigin(WTF::HashTableDeletedValueType)
         : bytecodeIndex(invalidBytecodeIndex)
-        , inlineCallFrame(bitwise_cast<InlineCallFrame*>(static_cast<uintptr_t>(1)))
+        , inlineCallFrame(deletedMarker())
     {
     }
     
@@ -97,15 +97,27 @@
     bool operator==(const CodeOrigin& other) const;
     bool operator!=(const CodeOrigin& other) const { return !(*this == other); }
     
+    // This checks if the two code origins correspond to the same stack trace snippets,
+    // but ignores whether the InlineCallFrames are identical.
+    bool isApproximatelyEqualTo(const CodeOrigin& other) const;
+    
+    unsigned approximateHash() const;
+    
     // Get the inline stack. This is slow, and is intended for debugging only.
     Vector<CodeOrigin> inlineStack() const;
     
     void dump(PrintStream&) const;
     void dumpInContext(PrintStream&, DumpContext*) const;
+
+private:
+    static InlineCallFrame* deletedMarker()
+    {
+        return bitwise_cast<InlineCallFrame*>(static_cast<uintptr_t>(1));
+    }
 };
 
 struct InlineCallFrame {
-    Vector<ValueRecovery> arguments;
+    Vector<ValueRecovery> arguments; // Includes 'this'.
     WriteBarrier<ScriptExecutable> executable;
     ValueRecovery calleeRecovery;
     CodeOrigin caller;
@@ -185,6 +197,12 @@
     static const bool safeToCompareToEmptyOrDeleted = true;
 };
 
+struct CodeOriginApproximateHash {
+    static unsigned hash(const CodeOrigin& key) { return key.approximateHash(); }
+    static bool equal(const CodeOrigin& a, const CodeOrigin& b) { return a.isApproximatelyEqualTo(b); }
+    static const bool safeToCompareToEmptyOrDeleted = true;
+};
+
 } // namespace JSC
 
 namespace WTF {
diff --git a/Source/JavaScriptCore/bytecode/DFGExitProfile.cpp b/Source/JavaScriptCore/bytecode/DFGExitProfile.cpp
index 5d05bbb..73ba88c 100644
--- a/Source/JavaScriptCore/bytecode/DFGExitProfile.cpp
+++ b/Source/JavaScriptCore/bytecode/DFGExitProfile.cpp
@@ -37,6 +37,8 @@
 
 bool ExitProfile::add(const ConcurrentJITLocker&, const FrequentExitSite& site)
 {
+    ASSERT(site.jitType() != ExitFromAnything);
+    
     // If we've never seen any frequent exits then create the list and put this site
     // into it.
     if (!m_frequentExitSites) {
@@ -78,7 +80,7 @@
         return false;
     
     for (unsigned i = m_frequentExitSites->size(); i--;) {
-        if (m_frequentExitSites->at(i) == site)
+        if (site.subsumes(m_frequentExitSites->at(i)))
             return true;
     }
     return false;
diff --git a/Source/JavaScriptCore/bytecode/DFGExitProfile.h b/Source/JavaScriptCore/bytecode/DFGExitProfile.h
index ab1a60d..8e0df41 100644
--- a/Source/JavaScriptCore/bytecode/DFGExitProfile.h
+++ b/Source/JavaScriptCore/bytecode/DFGExitProfile.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2011, 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,8 +26,11 @@
 #ifndef DFGExitProfile_h
 #define DFGExitProfile_h
 
+#if ENABLE(DFG_JIT)
+
 #include "ConcurrentJITLock.h"
 #include "ExitKind.h"
+#include "ExitingJITType.h"
 #include <wtf/HashSet.h>
 #include <wtf/OwnPtr.h>
 #include <wtf/Vector.h>
@@ -39,18 +42,21 @@
     FrequentExitSite()
         : m_bytecodeOffset(0) // 0 = empty value
         , m_kind(ExitKindUnset)
+        , m_jitType(ExitFromAnything)
     {
     }
     
     FrequentExitSite(WTF::HashTableDeletedValueType)
         : m_bytecodeOffset(1) // 1 = deleted value
         , m_kind(ExitKindUnset)
+        , m_jitType(ExitFromAnything)
     {
     }
     
-    explicit FrequentExitSite(unsigned bytecodeOffset, ExitKind kind)
+    explicit FrequentExitSite(unsigned bytecodeOffset, ExitKind kind, ExitingJITType jitType = ExitFromAnything)
         : m_bytecodeOffset(bytecodeOffset)
         , m_kind(kind)
+        , m_jitType(jitType)
     {
         if (m_kind == ArgumentsEscaped) {
             // Count this one globally. It doesn't matter where in the code block the arguments excaped;
@@ -61,9 +67,10 @@
     
     // Use this constructor if you wish for the exit site to be counted globally within its
     // code block.
-    explicit FrequentExitSite(ExitKind kind)
+    explicit FrequentExitSite(ExitKind kind, ExitingJITType jitType = ExitFromAnything)
         : m_bytecodeOffset(0)
         , m_kind(kind)
+        , m_jitType(jitType)
     {
     }
     
@@ -75,16 +82,36 @@
     bool operator==(const FrequentExitSite& other) const
     {
         return m_bytecodeOffset == other.m_bytecodeOffset
-            && m_kind == other.m_kind;
+            && m_kind == other.m_kind
+            && m_jitType == other.m_jitType;
+    }
+    
+    bool subsumes(const FrequentExitSite& other) const
+    {
+        if (m_bytecodeOffset != other.m_bytecodeOffset)
+            return false;
+        if (m_kind != other.m_kind)
+            return false;
+        if (m_jitType == ExitFromAnything)
+            return true;
+        return m_jitType == other.m_jitType;
     }
     
     unsigned hash() const
     {
-        return WTF::intHash(m_bytecodeOffset) + m_kind;
+        return WTF::intHash(m_bytecodeOffset) + m_kind + m_jitType * 7;
     }
     
     unsigned bytecodeOffset() const { return m_bytecodeOffset; }
     ExitKind kind() const { return m_kind; }
+    ExitingJITType jitType() const { return m_jitType; }
+    
+    FrequentExitSite withJITType(ExitingJITType jitType) const
+    {
+        FrequentExitSite result = *this;
+        result.m_jitType = jitType;
+        return result;
+    }
 
     bool isHashTableDeletedValue() const
     {
@@ -94,6 +121,7 @@
 private:
     unsigned m_bytecodeOffset;
     ExitKind m_kind;
+    ExitingJITType m_jitType;
 };
 
 struct FrequentExitSiteHash {
@@ -104,6 +132,7 @@
 
 } } // namespace JSC::DFG
 
+
 namespace WTF {
 
 template<typename T> struct DefaultHash;
@@ -166,6 +195,10 @@
 
     bool hasExitSite(const FrequentExitSite& site) const
     {
+        if (site.jitType() == ExitFromAnything) {
+            return hasExitSite(site.withJITType(ExitFromDFG))
+                || hasExitSite(site.withJITType(ExitFromFTL));
+        }
         return m_frequentExitSites.find(site) != m_frequentExitSites.end();
     }
     
@@ -184,4 +217,6 @@
 
 } } // namespace JSC::DFG
 
+#endif // ENABLE(DFG_JIT)
+
 #endif // DFGExitProfile_h
diff --git a/Source/JavaScriptCore/bytecode/ExitKind.h b/Source/JavaScriptCore/bytecode/ExitKind.h
index a9f6df6..6ac78a2 100644
--- a/Source/JavaScriptCore/bytecode/ExitKind.h
+++ b/Source/JavaScriptCore/bytecode/ExitKind.h
@@ -28,7 +28,7 @@
 
 namespace JSC {
 
-enum ExitKind {
+enum ExitKind : uint8_t {
     ExitKindUnset,
     BadType, // We exited because a type prediction was wrong.
     BadFunction, // We exited because we made an incorrect assumption about what function we would see.
diff --git a/Source/JavaScriptCore/bytecode/ExitingJITType.h b/Source/JavaScriptCore/bytecode/ExitingJITType.h
new file mode 100644
index 0000000..fbdc28e
--- /dev/null
+++ b/Source/JavaScriptCore/bytecode/ExitingJITType.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2014 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in the
+ *    documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */
+
+#ifndef ExitingJITType_h
+#define ExitingJITType_h
+
+#include "JITCode.h"
+
+namespace JSC {
+
+enum ExitingJITType : uint8_t {
+    ExitFromAnything,
+    ExitFromDFG,
+    ExitFromFTL
+};
+
+inline ExitingJITType exitingJITTypeFor(JITCode::JITType type)
+{
+    switch (type) {
+    case JITCode::DFGJIT:
+        return ExitFromDFG;
+    case JITCode::FTLJIT:
+        return ExitFromFTL;
+    default:
+        RELEASE_ASSERT_NOT_REACHED();
+        return ExitFromAnything;
+    }
+}
+
+} // namespace JSC
+
+#endif // ExitingJITType_h
+
diff --git a/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp b/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
index fbb3da1..a3c4fbf 100644
--- a/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
+++ b/Source/JavaScriptCore/bytecode/GetByIdStatus.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -34,6 +34,16 @@
 
 namespace JSC {
 
+#if ENABLE(JIT)
+bool GetByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType jitType)
+{
+    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, jitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, jitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, jitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, jitType));
+}
+#endif
+
 GetByIdStatus GetByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
 {
     UNUSED_PARAM(profiledBlock);
@@ -119,15 +129,35 @@
 GetByIdStatus GetByIdStatus::computeFor(CodeBlock* profiledBlock, StubInfoMap& map, unsigned bytecodeIndex, StringImpl* uid)
 {
     ConcurrentJITLocker locker(profiledBlock->m_lock);
-    
-    UNUSED_PARAM(profiledBlock);
-    UNUSED_PARAM(bytecodeIndex);
-    UNUSED_PARAM(uid);
+
+    GetByIdStatus result;
+
 #if ENABLE(JIT)
-    StructureStubInfo* stubInfo = map.get(CodeOrigin(bytecodeIndex));
-    if (!stubInfo || !stubInfo->seen)
+    result = computeForStubInfo(
+        locker, profiledBlock, map.get(CodeOrigin(bytecodeIndex)), uid);
+    
+    if (!result.takesSlowPath()
+        && (hasExitSite(locker, profiledBlock, bytecodeIndex)
+            || profiledBlock->likelyToTakeSlowCase(bytecodeIndex)))
+        return GetByIdStatus(TakesSlowPath, true);
+#else
+    UNUSED_PARAM(map);
+#endif
+
+    if (!result)
         return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
     
+    return result;
+}
+
+#if ENABLE(JIT)
+GetByIdStatus GetByIdStatus::computeForStubInfo(
+    const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo,
+    StringImpl* uid)
+{
+    if (!stubInfo || !stubInfo->seen)
+        return GetByIdStatus(NoInformation);
+    
     if (stubInfo->resetByGC)
         return GetByIdStatus(TakesSlowPath, true);
 
@@ -152,16 +182,12 @@
             return GetByIdStatus(MakesCalls, true);
     }
     
-    // Next check if it takes slow case, in which case we want to be kind of careful.
-    if (profiledBlock->likelyToTakeSlowCase(bytecodeIndex))
-        return GetByIdStatus(TakesSlowPath, true);
-    
     // Finally figure out if we can derive an access strategy.
     GetByIdStatus result;
     result.m_wasSeenInJIT = true; // This is interesting for bytecode dumping only.
     switch (stubInfo->accessType) {
     case access_unset:
-        return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
+        return GetByIdStatus(NoInformation);
         
     case access_get_by_id_self: {
         Structure* structure = stubInfo->u.getByIdSelf.baseObjectStructure.get();
@@ -261,10 +287,39 @@
         result.m_state = Simple;
     
     return result;
-#else // ENABLE(JIT)
-    UNUSED_PARAM(map);
-    return GetByIdStatus(NoInformation, false);
+}
 #endif // ENABLE(JIT)
+
+GetByIdStatus GetByIdStatus::computeFor(
+    CodeBlock* profiledBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap,
+    StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
+{
+#if ENABLE(DFG_JIT)
+    if (dfgBlock) {
+        GetByIdStatus result;
+        {
+            ConcurrentJITLocker locker(dfgBlock->m_lock);
+            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
+        }
+    
+        if (result.takesSlowPath())
+            return result;
+    
+        {
+            ConcurrentJITLocker locker(profiledBlock->m_lock);
+            if (hasExitSite(locker, profiledBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
+                return GetByIdStatus(TakesSlowPath, true);
+        }
+        
+        if (result.isSet())
+            return result;
+    }
+#else
+    UNUSED_PARAM(dfgBlock);
+    UNUSED_PARAM(dfgMap);
+#endif
+
+    return computeFor(profiledBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
 }
 
 GetByIdStatus GetByIdStatus::computeFor(VM& vm, Structure* structure, StringImpl* uid)
diff --git a/Source/JavaScriptCore/bytecode/GetByIdStatus.h b/Source/JavaScriptCore/bytecode/GetByIdStatus.h
index a1e801c..3ebee9e 100644
--- a/Source/JavaScriptCore/bytecode/GetByIdStatus.h
+++ b/Source/JavaScriptCore/bytecode/GetByIdStatus.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,6 +26,9 @@
 #ifndef GetByIdStatus_h
 #define GetByIdStatus_h
 
+#include "CodeOrigin.h"
+#include "ConcurrentJITLock.h"
+#include "ExitingJITType.h"
 #include "IntendedStructureChain.h"
 #include "PropertyOffset.h"
 #include "StructureSet.h"
@@ -74,6 +77,8 @@
     static GetByIdStatus computeFor(CodeBlock*, StubInfoMap&, unsigned bytecodeIndex, StringImpl* uid);
     static GetByIdStatus computeFor(VM&, Structure*, StringImpl* uid);
     
+    static GetByIdStatus computeFor(CodeBlock* baselineBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap, StubInfoMap& dfgMap, CodeOrigin, StringImpl* uid);
+    
     State state() const { return m_state; }
     
     bool isSet() const { return m_state != NoInformation; }
@@ -90,6 +95,10 @@
     bool wasSeenInJIT() const { return m_wasSeenInJIT; }
     
 private:
+#if ENABLE(JIT)
+    static bool hasExitSite(const ConcurrentJITLocker&, CodeBlock*, unsigned bytecodeIndex, ExitingJITType = ExitFromAnything);
+    static GetByIdStatus computeForStubInfo(const ConcurrentJITLocker&, CodeBlock*, StructureStubInfo*, StringImpl* uid);
+#endif
     static void computeForChain(GetByIdStatus& result, CodeBlock*, StringImpl* uid);
     static GetByIdStatus computeFromLLInt(CodeBlock*, unsigned bytecodeIndex, StringImpl* uid);
     
diff --git a/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.cpp b/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.cpp
index edf8e22..b2730a4 100644
--- a/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.cpp
+++ b/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -26,6 +26,8 @@
 #include "config.h"
 #include "ProfiledCodeBlockJettisoningWatchpoint.h"
 
+#if ENABLE(DFG_JIT)
+
 #include "CodeBlock.h"
 #include "DFGCommon.h"
 #include "DFGExitProfile.h"
@@ -49,7 +51,9 @@
     
     if (sourceBaselineCodeBlock) {
         sourceBaselineCodeBlock->addFrequentExitSite(
-            DFG::FrequentExitSite(m_codeOrigin.bytecodeIndex, m_exitKind));
+            DFG::FrequentExitSite(
+                m_codeOrigin.bytecodeIndex, m_exitKind,
+                exitingJITTypeFor(m_codeBlock->jitType())));
     }
     
     m_codeBlock->jettison(CountReoptimization);
@@ -60,3 +64,4 @@
 
 } // namespace JSC
 
+#endif // ENABLE(DFG_JIT)
\ No newline at end of file
diff --git a/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.h b/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.h
index 108e23a..33c32bf 100644
--- a/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.h
+++ b/Source/JavaScriptCore/bytecode/ProfiledCodeBlockJettisoningWatchpoint.h
@@ -26,6 +26,8 @@
 #ifndef ProfiledCodeBlockJettisoningWatchpoint_h
 #define ProfiledCodeBlockJettisoningWatchpoint_h
 
+#if ENABLE(DFG_JIT)
+
 #include "CodeOrigin.h"
 #include "ExitKind.h"
 #include "Watchpoint.h"
@@ -61,5 +63,7 @@
 
 } // namespace JSC
 
+#endif // ENABLE(DFG_JIT)
+
 #endif // ProfiledCodeBlockJettisoningWatchpoint_h
 
diff --git a/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp b/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp
index 17cf708..232384f 100644
--- a/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp
+++ b/Source/JavaScriptCore/bytecode/PutByIdStatus.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -35,6 +35,17 @@
 
 namespace JSC {
 
+#if ENABLE(JIT)
+bool PutByIdStatus::hasExitSite(const ConcurrentJITLocker& locker, CodeBlock* profiledBlock, unsigned bytecodeIndex, ExitingJITType exitType)
+{
+    return profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCache, exitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadCacheWatchpoint, exitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCache, exitType))
+        || profiledBlock->hasExitSite(locker, DFG::FrequentExitSite(bytecodeIndex, BadWeakConstantCacheWatchpoint, exitType));
+    
+}
+#endif
+
 PutByIdStatus PutByIdStatus::computeFromLLInt(CodeBlock* profiledBlock, unsigned bytecodeIndex, StringImpl* uid)
 {
     UNUSED_PARAM(profiledBlock);
@@ -89,13 +100,28 @@
     UNUSED_PARAM(bytecodeIndex);
     UNUSED_PARAM(uid);
 #if ENABLE(JIT)
-    if (profiledBlock->likelyToTakeSlowCase(bytecodeIndex))
+    if (profiledBlock->likelyToTakeSlowCase(bytecodeIndex)
+        || hasExitSite(locker, profiledBlock, bytecodeIndex))
         return PutByIdStatus(TakesSlowPath, 0, 0, 0, invalidOffset);
     
     StructureStubInfo* stubInfo = map.get(CodeOrigin(bytecodeIndex));
-    if (!stubInfo || !stubInfo->seen)
+    PutByIdStatus result = computeForStubInfo(locker, profiledBlock, stubInfo, uid);
+    if (!result)
         return computeFromLLInt(profiledBlock, bytecodeIndex, uid);
     
+    return result;
+#else // ENABLE(JIT)
+    UNUSED_PARAM(map);
+    return PutByIdStatus(NoInformation, 0, 0, 0, invalidOffset);
+#endif // ENABLE(JIT)
+}
+
+#if ENABLE(JIT)
+PutByIdStatus PutByIdStatus::computeForStubInfo(const ConcurrentJITLocker&, CodeBlock* profiledBlock, StructureStubInfo* stubInfo, StringImpl* uid)
+{
+    if (!stubInfo || !stubInfo->seen)
+        return PutByIdStatus();
+    
     if (stubInfo->resetByGC)
         return PutByIdStatus(TakesSlowPath, 0, 0, 0, invalidOffset);
 
@@ -142,10 +168,34 @@
         // we could do about it.
         return PutByIdStatus(TakesSlowPath, 0, 0, 0, invalidOffset);
     }
-#else // ENABLE(JIT)
-    UNUSED_PARAM(map);
-    return PutByIdStatus(NoInformation, 0, 0, 0, invalidOffset);
-#endif // ENABLE(JIT)
+}
+#endif
+
+PutByIdStatus PutByIdStatus::computeFor(CodeBlock* baselineBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap, StubInfoMap& dfgMap, CodeOrigin codeOrigin, StringImpl* uid)
+{
+#if ENABLE(JIT)
+    if (dfgBlock) {
+        {
+            ConcurrentJITLocker locker(baselineBlock->m_lock);
+            if (hasExitSite(locker, baselineBlock, codeOrigin.bytecodeIndex, ExitFromFTL))
+                return PutByIdStatus(TakesSlowPath);
+        }
+            
+        PutByIdStatus result;
+        {
+            ConcurrentJITLocker locker(dfgBlock->m_lock);
+            result = computeForStubInfo(locker, dfgBlock, dfgMap.get(codeOrigin), uid);
+        }
+        
+        if (result.isSet())
+            return result;
+    }
+#else
+    UNUSED_PARAM(dfgBlock);
+    UNUSED_PARAM(dfgMap);
+#endif
+
+    return computeFor(baselineBlock, baselineMap, codeOrigin.bytecodeIndex, uid);
 }
 
 PutByIdStatus PutByIdStatus::computeFor(VM& vm, JSGlobalObject* globalObject, Structure* structure, StringImpl* uid, bool isDirect)
diff --git a/Source/JavaScriptCore/bytecode/PutByIdStatus.h b/Source/JavaScriptCore/bytecode/PutByIdStatus.h
index c0a1bc3..9d03e4b 100644
--- a/Source/JavaScriptCore/bytecode/PutByIdStatus.h
+++ b/Source/JavaScriptCore/bytecode/PutByIdStatus.h
@@ -26,6 +26,7 @@
 #ifndef PutByIdStatus_h
 #define PutByIdStatus_h
 
+#include "ExitingJITType.h"
 #include "IntendedStructureChain.h"
 #include "PropertyOffset.h"
 #include "StructureStubInfo.h"
@@ -94,6 +95,8 @@
     static PutByIdStatus computeFor(CodeBlock*, StubInfoMap&, unsigned bytecodeIndex, StringImpl* uid);
     static PutByIdStatus computeFor(VM&, JSGlobalObject*, Structure*, StringImpl* uid, bool isDirect);
     
+    static PutByIdStatus computeFor(CodeBlock* baselineBlock, CodeBlock* dfgBlock, StubInfoMap& baselineMap, StubInfoMap& dfgMap, CodeOrigin, StringImpl* uid);
+    
     State state() const { return m_state; }
     
     bool isSet() const { return m_state != NoInformation; }
@@ -108,6 +111,10 @@
     PropertyOffset offset() const { return m_offset; }
     
 private:
+#if ENABLE(JIT)
+    static bool hasExitSite(const ConcurrentJITLocker&, CodeBlock*, unsigned bytecodeIndex, ExitingJITType = ExitFromAnything);
+    static PutByIdStatus computeForStubInfo(const ConcurrentJITLocker&, CodeBlock*, StructureStubInfo*, StringImpl* uid);
+#endif
     static PutByIdStatus computeFromLLInt(CodeBlock*, unsigned bytecodeIndex, StringImpl* uid);
     
     State m_state;
diff --git a/Source/JavaScriptCore/bytecode/StructureSet.h b/Source/JavaScriptCore/bytecode/StructureSet.h
index 4cdcd01..f89684f 100644
--- a/Source/JavaScriptCore/bytecode/StructureSet.h
+++ b/Source/JavaScriptCore/bytecode/StructureSet.h
@@ -45,6 +45,7 @@
     
     StructureSet(Structure* structure)
     {
+        ASSERT(structure);
         m_structures.append(structure);
     }
     
@@ -55,6 +56,7 @@
     
     void add(Structure* structure)
     {
+        ASSERT(structure);
         ASSERT(!contains(structure));
         m_structures.append(structure);
     }
diff --git a/Source/JavaScriptCore/bytecode/StructureStubInfo.h b/Source/JavaScriptCore/bytecode/StructureStubInfo.h
index 5463f3e..9217cb8 100644
--- a/Source/JavaScriptCore/bytecode/StructureStubInfo.h
+++ b/Source/JavaScriptCore/bytecode/StructureStubInfo.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2008, 2012, 2013 Apple Inc. All rights reserved.
+ * Copyright (C) 2008, 2012, 2013, 2014 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
@@ -216,7 +216,6 @@
 
     struct {
         int8_t registersFlushed;
-        int8_t callFrameRegister;
         int8_t baseGPR;
 #if USE(JSVALUE32_64)
         int8_t valueTagGPR;
@@ -290,7 +289,7 @@
     return structureStubInfo.codeOrigin;
 }
 
-typedef HashMap<CodeOrigin, StructureStubInfo*> StubInfoMap;
+typedef HashMap<CodeOrigin, StructureStubInfo*, CodeOriginApproximateHash> StubInfoMap;
 
 #else
 
diff --git a/Source/JavaScriptCore/bytecode/ValueRecovery.h b/Source/JavaScriptCore/bytecode/ValueRecovery.h
index 3af2c34..4031252 100644
--- a/Source/JavaScriptCore/bytecode/ValueRecovery.h
+++ b/Source/JavaScriptCore/bytecode/ValueRecovery.h
@@ -230,6 +230,27 @@
         return VirtualRegister(m_source.virtualReg);
     }
     
+    ValueRecovery withLocalsOffset(int offset) const
+    {
+        switch (m_technique) {
+        case DisplacedInJSStack:
+        case Int32DisplacedInJSStack:
+        case DoubleDisplacedInJSStack:
+        case CellDisplacedInJSStack:
+        case BooleanDisplacedInJSStack:
+        case Int52DisplacedInJSStack:
+        case StrictInt52DisplacedInJSStack: {
+            ValueRecovery result;
+            result.m_technique = m_technique;
+            result.m_source.virtualReg = m_source.virtualReg + offset;
+            return result;
+        }
+            
+        default:
+            return *this;
+        }
+    }
+    
     JSValue constant() const
     {
         ASSERT(m_technique == Constant);
diff --git a/Source/JavaScriptCore/bytecode/VirtualRegister.h b/Source/JavaScriptCore/bytecode/VirtualRegister.h
index c63aee8..61b04b5 100644
--- a/Source/JavaScriptCore/bytecode/VirtualRegister.h
+++ b/Source/JavaScriptCore/bytecode/VirtualRegister.h
@@ -65,6 +65,7 @@
     int toArgument() const { ASSERT(isArgument()); return operandToArgument(m_virtualRegister); }
     int toConstantIndex() const { ASSERT(isConstant()); return m_virtualRegister - s_firstConstantRegisterIndex; }
     int offset() const { return m_virtualRegister; }
+    int offsetInBytes() const { return m_virtualRegister * sizeof(Register); }
 
     bool operator==(const VirtualRegister other) const { return m_virtualRegister == other.m_virtualRegister; }
     bool operator!=(const VirtualRegister other) const { return m_virtualRegister != other.m_virtualRegister; }