Commit e08436ce authored by Peter Marshall's avatar Peter Marshall Committed by Commit Bot

[unwinder] Add a new API based on code pages for arm32 support

This new API uses the code pages rather than code ranges approach.

It's supported on arm32, as well as the previous two supported
platforms, x64 and arm64.

Deprecate the old API which only works on x64 and arm64 to reduce the
maintenance overhead of keeping both. Users of the old API should
migrate to the new one as it can be used on all supported platforms.

We keep the tests for the old API by ignoring deprecation warnings so
that we don't accidentally break it while it is still in the codebase.

Design doc:
https://docs.google.com/document/d/1VGwUult5AHLRk658VetwEHMOmDDxA2eDQs9lDFMZTE0

Bug: v8:8116
Change-Id: I1de8246a48fc1b4991603501ea6087db6b43fdd9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1969900
Commit-Queue: Peter Marshall <petermarshall@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65521}
parent f30af0bf
......@@ -2233,6 +2233,12 @@ struct UnwindState {
JSEntryStub js_run_microtasks_entry_stub;
};
// Bundles the code locations of the three JS entry stubs (regular call,
// construct call, and microtask runner) that the Unwinder API needs in order
// to recognize entry-stub frames on the stack. Obtained via
// Isolate::GetJSEntryStubs().
struct JSEntryStubs {
  JSEntryStub js_entry_stub;
  JSEntryStub js_construct_entry_stub;
  JSEntryStub js_run_microtasks_entry_stub;
};
/**
* A JSON Parser and Stringifier.
*/
......@@ -9136,8 +9142,15 @@ class V8_EXPORT Isolate {
/**
* Returns the UnwindState necessary for use with the Unwinder API.
*/
// TODO(petermarshall): Remove this API.
V8_DEPRECATE_SOON("Use entry_stubs + code_pages version.")
UnwindState GetUnwindState();
/**
* Returns the JSEntryStubs necessary for use with the Unwinder API.
*/
JSEntryStubs GetJSEntryStubs();
static constexpr size_t kMinCodePagesBufferSize = 32;
/**
......@@ -10457,7 +10470,7 @@ class V8_EXPORT Locker {
/**
* Various helpers for skipping over V8 frames in a given stack.
*
* The unwinder API is only supported on the x64 architecture.
* The unwinder API is only supported on the x64, ARM64 and ARM32 architectures.
*/
class V8_EXPORT Unwinder {
public:
......@@ -10489,10 +10502,26 @@ class V8_EXPORT Unwinder {
*
* \return True on success.
*/
// TODO(petermarshall): Remove this API
V8_DEPRECATE_SOON("Use entry_stubs + code_pages version.")
static bool TryUnwindV8Frames(const UnwindState& unwind_state,
RegisterState* register_state,
const void* stack_base);
/**
* The same as above, but is available on x64, ARM64 and ARM32.
*
* \param code_pages A list of all of the ranges in which V8 has allocated
* executable code. The caller should obtain this list by calling
* Isolate::CopyCodePages() during the same interrupt/thread suspension that
* captures the stack.
*/
static bool TryUnwindV8Frames(const JSEntryStubs& entry_stubs,
size_t code_pages_length,
const MemoryRange* code_pages,
RegisterState* register_state,
const void* stack_base);
/**
* Whether the PC is within the V8 code range represented by code_range or
* embedded_code_range in |unwind_state|.
......@@ -10501,7 +10530,16 @@ class V8_EXPORT Unwinder {
* and unwind_state will always fail. If it returns true, then unwinding may
* (but not necessarily) be successful.
*/
// TODO(petermarshall): Remove this API
V8_DEPRECATE_SOON("Use code_pages version.")
static bool PCIsInV8(const UnwindState& unwind_state, void* pc);
/**
* The same as above, but is available on x64, ARM64 and ARM32. See the
* comment on TryUnwindV8Frames.
*/
static bool PCIsInV8(size_t code_pages_length, const MemoryRange* code_pages,
void* pc);
};
// --- Implementation ---
......
......@@ -8914,6 +8914,25 @@ UnwindState Isolate::GetUnwindState() {
return unwind_state;
}
// Returns the instruction ranges of the three JS entry builtins (JSEntry,
// JSConstructEntry, JSRunMicrotasksEntry) packaged for the Unwinder API.
JSEntryStubs Isolate::GetJSEntryStubs() {
  JSEntryStubs entry_stubs;
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(this);

  // Copies one builtin's instruction start/size into the given stub slot.
  auto fill_stub = [i_isolate](i::Builtins::Name name, JSEntryStub* stub) {
    i::Code builtin_code = i_isolate->heap()->builtin(name);
    stub->code.start =
        reinterpret_cast<const void*>(builtin_code.InstructionStart());
    stub->code.length_in_bytes = builtin_code.InstructionSize();
  };

  fill_stub(i::Builtins::kJSEntry, &entry_stubs.js_entry_stub);
  fill_stub(i::Builtins::kJSConstructEntry,
            &entry_stubs.js_construct_entry_stub);
  fill_stub(i::Builtins::kJSRunMicrotasksEntry,
            &entry_stubs.js_run_microtasks_entry_stub);

  return entry_stubs;
}
size_t Isolate::CopyCodePages(size_t capacity, MemoryRange* code_pages_out) {
#if !defined(V8_TARGET_ARCH_64_BIT) && !defined(V8_TARGET_ARCH_ARM)
// Not implemented on other platforms.
......
......@@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <algorithm>
#include "include/v8.h"
#include "src/common/globals.h"
#include "src/execution/frame-constants.h"
......@@ -10,13 +12,37 @@ namespace v8 {
namespace {
bool PCIsInCodeRange(const v8::MemoryRange& code_range, void* pc) {
// Computes the one-past-the-end pointer of a memory range.
//
// Defect fixed: the pasted hunk interleaved leftover lines of the removed,
// pre-refactor PCIsInCodeRange body (referencing undefined `pc` and
// `code_range`, with a stray second return) into CalculateEnd, leaving the
// block uncompilable. Reconstructed the clean post-refactor code.
const i::byte* CalculateEnd(const void* start, size_t length_in_bytes) {
  // Given that the length of the memory range is in bytes and it is not
  // necessarily aligned, we need to do the pointer arithmetic in byte* here.
  const i::byte* start_as_byte = reinterpret_cast<const i::byte*>(start);
  return start_as_byte + length_in_bytes;
}

// Returns true iff pc lies within [code_range.start, end) of the given range.
bool PCIsInCodeRange(const v8::MemoryRange& code_range, void* pc) {
  return pc >= code_range.start &&
         pc < CalculateEnd(code_range.start, code_range.length_in_bytes);
}
// This relies on the fact that the code pages are ordered, and that they don't
// overlap.
bool PCIsInCodePages(size_t code_pages_length, const MemoryRange* code_pages,
void* pc) {
DCHECK(std::is_sorted(code_pages, code_pages + code_pages_length,
[](const MemoryRange& a, const MemoryRange& b) {
return a.start < b.start;
}));
MemoryRange fake_range{pc, 1};
auto it =
std::upper_bound(code_pages, code_pages + code_pages_length, fake_range,
[](const MemoryRange& a, const MemoryRange& b) {
return a.start < b.start;
});
DCHECK_IMPLIES(it != code_pages + code_pages_length, pc < it->start);
if (it == code_pages) return false;
--it;
return it->start <= pc && pc < CalculateEnd(it->start, it->length_in_bytes);
}
bool IsInJSEntryRange(const UnwindState& unwind_state, void* pc) {
......@@ -34,6 +60,21 @@ bool IsInUnsafeJSEntryRange(const UnwindState& unwind_state, void* pc) {
// within JSEntry.
}
bool IsInJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
return PCIsInCodeRange(entry_stubs.js_entry_stub.code, pc) ||
PCIsInCodeRange(entry_stubs.js_construct_entry_stub.code, pc) ||
PCIsInCodeRange(entry_stubs.js_run_microtasks_entry_stub.code, pc);
}
// Returns whether pc is in a region of JSEntry where unwinding is unsafe.
// TODO(petermarshall): We can be more precise by checking whether we are
// in JSEntry but after frame setup and before frame teardown, in which case
// we are safe to unwind the stack. For now, we bail out if the PC is anywhere
// within JSEntry.
bool IsInUnsafeJSEntryRange(const JSEntryStubs& entry_stubs, void* pc) {
  return IsInJSEntryRange(entry_stubs, pc);
}
// Dereferences `address` as an Address-sized slot (used to read saved PC/FP
// values out of stack frames). The caller must ensure the address is readable.
i::Address Load(i::Address address) {
  return *reinterpret_cast<i::Address*>(address);
}
......@@ -50,6 +91,18 @@ void* GetReturnAddressFromFP(void* fp, void* pc,
Load(reinterpret_cast<i::Address>(fp) + caller_pc_offset));
}
void* GetReturnAddressFromFP(void* fp, void* pc,
const JSEntryStubs& entry_stubs) {
int caller_pc_offset = i::CommonFrameConstants::kCallerPCOffset;
#ifdef V8_TARGET_ARCH_ARM64
if (IsInJSEntryRange(entry_stubs, pc)) {
caller_pc_offset = i::EntryFrameConstants::kDirectCallerPCOffset;
}
#endif
return reinterpret_cast<void*>(
Load(reinterpret_cast<i::Address>(fp) + caller_pc_offset));
}
void* GetCallerFPFromFP(void* fp, void* pc,
const v8::UnwindState& unwind_state) {
int caller_fp_offset = i::CommonFrameConstants::kCallerFPOffset;
......@@ -62,6 +115,17 @@ void* GetCallerFPFromFP(void* fp, void* pc,
Load(reinterpret_cast<i::Address>(fp) + caller_fp_offset));
}
void* GetCallerFPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
int caller_fp_offset = i::CommonFrameConstants::kCallerFPOffset;
#ifdef V8_TARGET_ARCH_ARM64
if (IsInJSEntryRange(entry_stubs, pc)) {
caller_fp_offset = i::EntryFrameConstants::kDirectCallerFPOffset;
}
#endif
return reinterpret_cast<void*>(
Load(reinterpret_cast<i::Address>(fp) + caller_fp_offset));
}
void* GetCallerSPFromFP(void* fp, void* pc,
const v8::UnwindState& unwind_state) {
int caller_sp_offset = i::CommonFrameConstants::kCallerSPOffset;
......@@ -74,6 +138,17 @@ void* GetCallerSPFromFP(void* fp, void* pc,
caller_sp_offset);
}
void* GetCallerSPFromFP(void* fp, void* pc, const JSEntryStubs& entry_stubs) {
int caller_sp_offset = i::CommonFrameConstants::kCallerSPOffset;
#ifdef V8_TARGET_ARCH_ARM64
if (IsInJSEntryRange(entry_stubs, pc)) {
caller_sp_offset = i::EntryFrameConstants::kDirectCallerSPOffset;
}
#endif
return reinterpret_cast<void*>(reinterpret_cast<i::Address>(fp) +
caller_sp_offset);
}
bool AddressIsInStack(const void* address, const void* stack_base,
const void* stack_top) {
return address <= stack_base && address >= stack_top;
......@@ -121,9 +196,57 @@ bool Unwinder::TryUnwindV8Frames(const UnwindState& unwind_state,
return false;
}
// Code-pages flavor of TryUnwindV8Frames: walks frame pointers from
// register_state until the PC leaves V8 code, then rewrites register_state to
// the first non-V8 caller frame. Returns false (leaving register_state
// untouched) if the PC is not in V8, is in an unsafe entry-stub region, or
// any frame/sp value falls outside [stack_top, stack_base].
bool Unwinder::TryUnwindV8Frames(const JSEntryStubs& entry_stubs,
                                 size_t code_pages_length,
                                 const MemoryRange* code_pages,
                                 RegisterState* register_state,
                                 const void* stack_base) {
  const void* stack_top = register_state->sp;

  void* pc = register_state->pc;
  // Only unwind if we start inside V8 code and not in the fragile part of a
  // JS entry stub (where the frame may not be fully set up yet).
  if (PCIsInV8(code_pages_length, code_pages, pc) &&
      !IsInUnsafeJSEntryRange(entry_stubs, pc)) {
    void* current_fp = register_state->fp;
    if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;

    // Peek at the return address that the caller pushed. If it's in V8, then we
    // assume the caller frame is a JS frame and continue to unwind.
    void* next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
    while (PCIsInV8(code_pages_length, code_pages, next_pc)) {
      // Still inside V8: hop to the caller frame, validating every FP
      // against the stack bounds before dereferencing it.
      current_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
      if (!AddressIsInStack(current_fp, stack_base, stack_top)) return false;
      pc = next_pc;
      next_pc = GetReturnAddressFromFP(current_fp, pc, entry_stubs);
    }

    // next_pc is now outside V8: publish the caller frame's registers.
    void* final_sp = GetCallerSPFromFP(current_fp, pc, entry_stubs);
    if (!AddressIsInStack(final_sp, stack_base, stack_top)) return false;
    register_state->sp = final_sp;

    // We don't check that the final FP value is within the stack bounds because
    // this is just the rbp value that JSEntryStub pushed. On platforms like
    // Win64 this is not used as a dedicated FP register, and could contain
    // anything.
    void* final_fp = GetCallerFPFromFP(current_fp, pc, entry_stubs);
    register_state->fp = final_fp;

    register_state->pc = next_pc;

    // Link register no longer valid after unwinding.
    register_state->lr = nullptr;
    return true;
  }
  return false;
}
// Deprecated UnwindState flavor: pc counts as V8 code if it is non-null and
// falls in either the JIT code range or the embedded (builtins) code range.
bool Unwinder::PCIsInV8(const UnwindState& unwind_state, void* pc) {
  return pc && (PCIsInCodeRange(unwind_state.code_range, pc) ||
                PCIsInCodeRange(unwind_state.embedded_code_range, pc));
}
// Code-pages flavor: a null pc is never V8 code; otherwise pc is V8 code iff
// it lies inside one of the supplied code pages.
bool Unwinder::PCIsInV8(size_t code_pages_length, const MemoryRange* code_pages,
                        void* pc) {
  if (!pc) return false;
  return PCIsInCodePages(code_pages_length, code_pages, pc);
}
} // namespace v8
......@@ -1490,7 +1490,7 @@ class Isolate final : private HiddenFactory {
void AddCodeMemoryChunk(MemoryChunk* chunk);
void RemoveCodeMemoryChunk(MemoryChunk* chunk);
void AddCodeRange(Address begin, size_t length_in_bytes);
V8_EXPORT_PRIVATE void AddCodeRange(Address begin, size_t length_in_bytes);
private:
explicit Isolate(std::unique_ptr<IsolateAllocator> isolate_allocator);
......
......@@ -259,6 +259,7 @@ v8_source_set("cctest_sources") {
"test-types.cc",
"test-unboxed-doubles.cc",
"test-unscopables-hidden-prototype.cc",
"test-unwinder-code-pages.cc",
"test-unwinder.cc",
"test-usecounters.cc",
"test-utils.cc",
......
......@@ -436,11 +436,15 @@
}], # variant == no_wasm_traps
##############################################################################
# The stack unwinder API is only supported on x64 and arm64.
# The stack unwinder API is only supported on x64, arm64 and arm.
['arch != x64 and arch != arm64', {
'test-unwinder/*': [SKIP]
}],
['arch != x64 and arch != arm64 and arch != arm', {
'test-unwinder-code-pages/*': [SKIP]
}],
##############################################################################
['lite_mode or variant == jitless', {
......
......@@ -26354,7 +26354,16 @@ TEST(TestGetUnwindState) {
v8::Isolate* isolate = env->GetIsolate();
i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
// Ignore deprecation warnings so that we can keep the tests for now.
// TODO(petermarshall): Remove this once the deprecated API is gone.
#if __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#endif
v8::UnwindState unwind_state = isolate->GetUnwindState();
#if __clang__
#pragma clang diagnostic pop
#endif
v8::MemoryRange builtins_range = unwind_state.embedded_code_range;
// Check that each off-heap builtin is within the builtins code range.
......@@ -26376,6 +26385,30 @@ TEST(TestGetUnwindState) {
reinterpret_cast<i::Address>(js_entry_stub.code.start));
}
// Checks that each stub returned by GetJSEntryStubs points at the
// corresponding builtin's instruction start.
TEST(GetJSEntryStubs) {
  LocalContext env;
  v8::Isolate* isolate = env->GetIsolate();
  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);

  v8::JSEntryStubs entry_stubs = isolate->GetJSEntryStubs();

  // Shared checker: the stub's code.start must equal the builtin's
  // InstructionStart.
  auto check_stub = [&](i::Builtins::Name builtin,
                        const v8::JSEntryStub& stub) {
    CHECK_EQ(i_isolate->heap()->builtin(builtin).InstructionStart(),
             reinterpret_cast<i::Address>(stub.code.start));
  };

  check_stub(i::Builtins::kJSEntry, entry_stubs.js_entry_stub);
  check_stub(i::Builtins::kJSConstructEntry,
             entry_stubs.js_construct_entry_stub);
  check_stub(i::Builtins::kJSRunMicrotasksEntry,
             entry_stubs.js_run_microtasks_entry_stub);
}
TEST(MicrotaskContextShouldBeNativeContext) {
LocalContext env;
v8::Isolate* isolate = env->GetIsolate();
......
This diff is collapsed.
......@@ -15,14 +15,22 @@ namespace v8 {
namespace internal {
namespace test_unwinder {
static void* unlimited_stack_base = std::numeric_limits<void*>::max();
static const void* fake_stack_base = nullptr;
// Ignore deprecation warnings so that we can keep the tests for now.
// TODO(petermarshall): Delete all the tests here when the old API is removed to
// reduce the duplication.
#if __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated"
#endif
TEST(Unwind_BadState_Fail) {
UnwindState unwind_state; // Fields are intialized to nullptr.
RegisterState register_state;
bool unwound = v8::Unwinder::TryUnwindV8Frames(unwind_state, &register_state,
unlimited_stack_base);
fake_stack_base);
CHECK(!unwound);
// The register state should not change when unwinding fails.
CHECK_NULL(register_state.fp);
......@@ -341,7 +349,7 @@ TEST(Unwind_JSEntry_Fail) {
register_state.pc = start + 10;
bool unwound = v8::Unwinder::TryUnwindV8Frames(unwind_state, &register_state,
unlimited_stack_base);
fake_stack_base);
CHECK(!unwound);
// The register state should not change when unwinding fails.
CHECK_NULL(register_state.fp);
......@@ -541,6 +549,10 @@ TEST(PCIsInV8_LargeCodeObject) {
CHECK(v8::Unwinder::PCIsInV8(unwind_state, pc));
}
#if __clang__
#pragma clang diagnostic pop
#endif
} // namespace test_unwinder
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment