/*
* Copyright (C) 2018-2023 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#pragma once

#include <cstdint>

namespace WTF {

#if defined(NDEBUG) && (CPU(X86_64) || CPU(X86) || CPU(ARM64) || CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL))
// We can only use the inline asm implementation on release builds: it must be
// inlined into its caller to report the caller's stack pointer, and debug
// builds may compile it as an out-of-line function with a frame of its own.
ALWAYS_INLINE void* currentStackPointer()
{
    void* stackPointer = nullptr;
#if CPU(X86_64)
    __asm__ volatile ("movq %%rsp, %0" : "=r"(stackPointer) ::);
#elif CPU(X86)
    __asm__ volatile ("movl %%esp, %0" : "=r"(stackPointer) ::);
#elif CPU(ARM64) && defined(__ILP32__)
    // Under the ILP32 ABI, pointers are 32-bit but sp is still a 64-bit
    // register, so read it into a 64-bit temporary and narrow afterwards.
    uint64_t stackPointerRegister = 0;
    __asm__ volatile ("mov %0, sp" : "=r"(stackPointerRegister) ::);
    stackPointer = reinterpret_cast<void*>(stackPointerRegister);
#elif CPU(ARM64) || CPU(ARM_THUMB2) || CPU(ARM_TRADITIONAL)
    __asm__ volatile ("mov %0, sp" : "=r"(stackPointer) ::);
#endif
    return stackPointer;
}
#elif !ENABLE(C_LOOP)
#define USE_ASM_CURRENT_STACK_POINTER 1
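// Out-of-line fallback for non-C_LOOP builds where the inline asm above is
// unavailable (e.g. debug builds). As the macro name indicates, the
// implementation is expected to be provided in assembly, so the returned
// value stays precise without relying on inlining.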
extern "C" WTF_EXPORT_PRIVATE void* CDECL currentStackPointer(void);
#else
#define USE_GENERIC_CURRENT_STACK_POINTER 1
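// Generic out-of-line fallback (e.g. for C_LOOP builds); without inline asm
// it can only approximate the caller's stack pointer.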
WTF_EXPORT_PRIVATE void* currentStackPointer();
#endif

} // namespace WTF

using WTF::currentStackPointer;
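
// Example (an illustrative sketch, not part of this header's API): because
// the stacks on all CPUs handled above grow downward, the gap between a
// pointer captured near a thread's entry point and the current stack pointer
// gives a rough measure of that thread's stack usage:
//
//     void* origin = currentStackPointer(); // captured near the top of the thread
//     ...
//     size_t used = static_cast<uint8_t*>(origin) - static_cast<uint8_t*>(currentStackPointer());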
|