blob: 4f0ebcd16363407000c14ceddb69687c856e469b [file] [log] [blame]
/*
* Copyright (C) 2018 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
namespace WTF {
#if defined(NDEBUG) && COMPILER(GCC_COMPATIBLE) \
&& (CPU(X86_64) || CPU(X86) || CPU(ARM64) || CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL))
// We can only use the inline asm implementation on release builds because it
// needs to be inlinable in order to be correct. If the compiler emitted a real
// call to this function (as it may in unoptimized/debug builds), the asm would
// read the stack pointer of this function's own frame, not the caller's.
//
// Returns the caller's current stack pointer as a void*.
ALWAYS_INLINE void* currentStackPointer()
{
void* stackPointer = nullptr;
#if CPU(X86_64)
// Copy the 64-bit stack pointer register into an arbitrary GPR ("=r").
__asm__ volatile ("movq %%rsp, %0" : "=r"(stackPointer) ::);
#elif CPU(X86)
// 32-bit x86: copy %esp instead.
__asm__ volatile ("movl %%esp, %0" : "=r"(stackPointer) ::);
#elif CPU(ARM64) && defined(__ILP32__)
// ARM64 ILP32: sp is a 64-bit register but pointers are 32 bits, so we
// must read sp into a 64-bit temporary first and then narrow it to the
// 32-bit pointer via the reinterpret_cast below.
uint64_t stackPointerRegister = 0;
__asm__ volatile ("mov %0, sp" : "=r"(stackPointerRegister) ::);
stackPointer = reinterpret_cast<void*>(stackPointerRegister);
#elif CPU(ARM64) || CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL)
// ARM (32- and 64-bit LP64): sp is directly addressable by name and the
// register width matches the pointer width, so read it straight into the
// result.
__asm__ volatile ("mov %0, sp" : "=r"(stackPointer) ::);
#endif
return stackPointer;
}
#else
// Debug builds (or unsupported compiler/CPU combinations) fall back to an
// out-of-line implementation defined elsewhere; the macro lets that
// translation unit know it must provide the generic version.
#define USE_GENERIC_CURRENT_STACK_POINTER 1
WTF_EXPORT_PRIVATE void* currentStackPointer();
#endif
} // namespace WTF
using WTF::currentStackPointer;