Commit efae96fb authored by Thibaud Michaud, committed by V8 LUCI CQ

[wasm] Mark roots for stack switching

Treat all stack pointers as roots:
- Maintain a global linked-list of StackMemories
- Update StackFrameIterator to process inactive stacks
- Visit roots in all inactive stacks (including root marking and root
  pointer updating); a sketch of the scheme follows below.

Drive-by:
- Fix some issues uncovered by the test
- Refactor the builtin constants

R=mlippautz@chromium.org,ahaas@chromium.org

Bug: v8:12191
Change-Id: I5b6381f9818166e2eabf80dd59135673dddb2afc
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3310932
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Thibaud Michaud <thibaudm@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78291}
parent b8ae525e
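For orientation before the diff: each wasm stack records its suspended sp/fp in a jump buffer, all StackMemory objects of an isolate are linked into a circular list, and GC root iteration walks the frames of every inactive stack. The following is a minimal standalone sketch of that shape, using simplified stand-in types and names rather than the actual V8 classes:

// Standalone model of the scheme in this CL (not V8 code): stacks form a
// circular doubly linked list; root marking walks every *inactive* stack,
// starting from the sp/fp saved in its jump buffer. A null fp marks the
// boundary of a stack segment.
#include <cassert>
#include <cstdint>
#include <iostream>
#include <vector>

struct JumpBuffer {
  uintptr_t sp = 0;
  uintptr_t fp = 0;  // 0 (kNullAddress) until the stack has a frame.
};

struct StackMemory {
  JumpBuffer jmpbuf;
  bool active = false;
  StackMemory* next = this;
  StackMemory* prev = this;

  // Mirrors StackMemory::Add in the diff: insert `stack` right after `this`.
  void Add(StackMemory* stack) {
    stack->next = this->next;
    stack->prev = this;
    this->next->prev = stack;
    this->next = stack;
  }
};

// Stand-in for walking one frame: here we just record the fp values we saw.
void VisitFrame(uintptr_t fp, std::vector<uintptr_t>* roots) {
  roots->push_back(fp);
}

// Mirrors the Isolate::Iterate hunk: visit roots in all inactive stacks.
void IterateStacks(StackMemory* head, std::vector<uintptr_t>* roots) {
  StackMemory* current = head;
  do {
    if (!current->active && current->jmpbuf.fp != 0) {
      // In V8 this seeds a StackFrameIterator from the jump buffer and calls
      // frame->Iterate(v) for each frame until the saved fp is kNullAddress.
      VisitFrame(current->jmpbuf.fp, roots);
    }
    current = current->next;
  } while (current != head);
}

int main() {
  StackMemory main_stack;        // the view of the libc stack
  main_stack.active = true;
  StackMemory suspended;         // an allocated, currently suspended stack
  suspended.jmpbuf.fp = 0x1000;  // pretend it has one frame
  main_stack.Add(&suspended);

  std::vector<uintptr_t> roots;
  IterateStacks(&main_stack, &roots);
  assert(roots.size() == 1 && roots[0] == 0x1000);
  std::cout << "visited " << roots.size() << " inactive frame(s)\n";
}

The real classes and the exact iteration appear in the stacks.h and isolate.cc hunks below.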
@@ -3658,6 +3658,7 @@ namespace {
 // Helper function for WasmReturnPromiseOnSuspend.
 void LoadJumpBuffer(MacroAssembler* masm, Register jmpbuf) {
   __ movq(rsp, MemOperand(jmpbuf, wasm::kJmpBufSpOffset));
+  __ movq(rbp, MemOperand(jmpbuf, wasm::kJmpBufFpOffset));
   // The stack limit is set separately under the ExecutionAccess lock.
   // TODO(thibaudm): Reload live registers.
 }
@@ -3674,17 +3675,11 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
     __ decq(param_count);
   }
-  constexpr int kFrameMarkerOffset = -kSystemPointerSize;
-  // This slot contains the number of slots at the top of the frame that need to
-  // be scanned by the GC.
-  constexpr int kParamCountOffset =
-      BuiltinWasmWrapperConstants::kGCScanSlotCountOffset - kSystemPointerSize;
-  // The frame marker is not included in the slot count.
-  constexpr int kNumSpillSlots =
-      -(kParamCountOffset - kFrameMarkerOffset) / kSystemPointerSize;
-  __ subq(rsp, Immediate(kNumSpillSlots * kSystemPointerSize));
-  __ movq(MemOperand(rbp, kParamCountOffset), param_count);
+  __ subq(rsp, Immediate(ReturnPromiseOnSuspendFrameConstants::kSpillAreaSize));
+  __ movq(
+      MemOperand(rbp, ReturnPromiseOnSuspendFrameConstants::kParamCountOffset),
+      param_count);

   // -------------------------------------------
   // Get the instance and wasm call target.
@@ -3721,6 +3716,7 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
       jmpbuf, FieldOperand(foreign_jmpbuf, Foreign::kForeignAddressOffset),
       kForeignForeignAddressTag, r8);
   __ movq(MemOperand(jmpbuf, wasm::kJmpBufSpOffset), rsp);
+  __ movq(MemOperand(jmpbuf, wasm::kJmpBufFpOffset), rbp);
   Register stack_limit_address = rcx;
   __ movq(stack_limit_address,
           FieldOperand(wasm_instance,
@@ -3762,9 +3758,9 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
       target_jmpbuf,
       FieldOperand(foreign_jmpbuf, Foreign::kForeignAddressOffset),
       kForeignForeignAddressTag, r8);
+  __ Move(GCScanSlotPlace, 0);
   // Switch stack!
   LoadJumpBuffer(masm, target_jmpbuf);
-  __ movq(rbp, rsp);  // New stack, there is no frame yet.
   foreign_jmpbuf = no_reg;
   target_jmpbuf = no_reg;
   // live: [rsi, rdi]
@@ -3827,7 +3823,6 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
       kForeignForeignAddressTag, r8);
   // Switch stack!
   LoadJumpBuffer(masm, jmpbuf);
-  __ leaq(rbp, Operand(rsp, (kNumSpillSlots + 1) * kSystemPointerSize));
   __ Move(GCScanSlotPlace, 1);
   __ Push(wasm_instance);  // Spill.
   __ Move(kContextRegister, Smi::zero());
@@ -3841,7 +3836,9 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
   // -------------------------------------------
   // Epilogue.
   // -------------------------------------------
-  __ movq(param_count, MemOperand(rbp, kParamCountOffset));
+  __ movq(
+      param_count,
+      MemOperand(rbp, ReturnPromiseOnSuspendFrameConstants::kParamCountOffset));
   __ LeaveFrame(StackFrame::RETURN_PROMISE_ON_SUSPEND);
   __ DropArguments(param_count, r8, TurboAssembler::kCountIsInteger,
                    TurboAssembler::kCountExcludesReceiver);
...
@@ -209,7 +209,15 @@ class BuiltinWasmWrapperConstants : public TypedFrameConstants {
   // This slot contains the number of slots at the top of the frame that need to
   // be scanned by the GC.
   static constexpr int kGCScanSlotCountOffset =
-      kFrameTypeOffset - kSystemPointerSize;
+      TYPED_FRAME_PUSHED_VALUE_OFFSET(0);
+};
+
+class ReturnPromiseOnSuspendFrameConstants
+    : public BuiltinWasmWrapperConstants {
+ public:
+  static constexpr int kParamCountOffset = TYPED_FRAME_PUSHED_VALUE_OFFSET(1);
+  static constexpr int kSpillAreaSize =
+      -(kParamCountOffset - TypedFrameConstants::kFixedFrameSizeFromFp);
 };

 class ConstructFrameConstants : public TypedFrameConstants {
...
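The constants above describe the wrapper frame's GC-scan-slot protocol: the builtin records, at a fixed fp-relative slot, how many of its pushed values are tagged, and the frame iterator then scans exactly that many slots above sp (see the ReturnPromiseOnSuspendFrame::Iterate hunk further down). Here is a small standalone model of that protocol; the offsets are made up for illustration and are not the real TYPED_FRAME_PUSHED_VALUE_OFFSET values:

// Standalone model (not V8 code) of the GC-scan-slot handshake between a
// wrapper builtin and the frame iterator.
#include <cassert>
#include <cstdint>

constexpr int kSystemPointerSize = sizeof(void*);
// Hypothetical fp-relative offset of the scan-count slot.
constexpr int kGCScanSlotCountOffset = -2 * kSystemPointerSize;

int CountTaggedSpillSlots(const uintptr_t* fp, const uintptr_t* sp) {
  // Read the slot count the builtin stored relative to fp...
  intptr_t scan_count = *reinterpret_cast<const intptr_t*>(
      reinterpret_cast<const uint8_t*>(fp) + kGCScanSlotCountOffset);
  // ...and visit that many slots starting at sp (here we only count them).
  int visited = 0;
  for (intptr_t i = 0; i < scan_count; i++) {
    (void)sp[i];
    ++visited;
  }
  return visited;
}

int main() {
  // Fake frame: the scan-count slot sits two words below fp.
  uintptr_t frame[8] = {};
  uintptr_t* fp = &frame[6];
  frame[4] = 2;  // the builtin reported two tagged spill slots
  uintptr_t* sp = &frame[0];
  assert(CountTaggedSpillSlots(fp, sp) == 2);
}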
@@ -241,7 +241,7 @@ inline JsToWasmFrame::JsToWasmFrame(StackFrameIteratorBase* iterator)

 inline ReturnPromiseOnSuspendFrame::ReturnPromiseOnSuspendFrame(
     StackFrameIteratorBase* iterator)
-    : StubFrame(iterator) {}
+    : ExitFrame(iterator) {}

 inline CWasmEntryFrame::CWasmEntryFrame(StackFrameIteratorBase* iterator)
     : StubFrame(iterator) {}
...
@@ -62,13 +62,19 @@ class StackHandlerIterator {
  public:
   StackHandlerIterator(const StackFrame* frame, StackHandler* handler)
       : limit_(frame->fp()), handler_(handler) {
-    // Make sure the handler has already been unwound to this frame.
-    DCHECK(frame->sp() <= AddressOf(handler));
 #if V8_ENABLE_WEBASSEMBLY
+    // Make sure the handler has already been unwound to this frame. With stack
+    // switching this is not equivalent to the inequality below, because the
+    // frame and the handler could be in different stacks.
+    DCHECK_IMPLIES(!FLAG_experimental_wasm_stack_switching,
+                   frame->sp() <= AddressOf(handler));
+
     // For CWasmEntry frames, the handler was registered by the last C++
     // frame (Execution::CallWasm), so even though its address is already
     // beyond the limit, we know we always want to unwind one handler.
     if (frame->is_c_wasm_entry()) handler_ = handler_->next();
+#else
+    // Make sure the handler has already been unwound to this frame.
+    DCHECK_LE(frame->sp(), AddressOf(handler));
 #endif  // V8_ENABLE_WEBASSEMBLY
   }
@@ -103,6 +109,13 @@ StackFrameIterator::StackFrameIterator(Isolate* isolate, ThreadLocalTop* t)
     : StackFrameIteratorBase(isolate, true) {
   Reset(t);
 }
+#if V8_ENABLE_WEBASSEMBLY
+StackFrameIterator::StackFrameIterator(Isolate* isolate,
+                                       wasm::StackMemory* stack)
+    : StackFrameIterator(isolate) {
+  Reset(isolate->thread_local_top(), stack);
+}
+#endif

 void StackFrameIterator::Advance() {
   DCHECK(!done());
@@ -122,8 +135,14 @@ void StackFrameIterator::Advance() {
   frame_ = SingletonFor(type, &state);

   // When we're done iterating over the stack frames, the handler
-  // chain must have been completely unwound.
-  DCHECK(!done() || handler_ == nullptr);
+  // chain must have been completely unwound. Except for wasm stack-switching:
+  // we stop at the end of the current segment.
+#if V8_ENABLE_WEBASSEMBLY
+  DCHECK_IMPLIES(done() && !FLAG_experimental_wasm_stack_switching,
+                 handler_ == nullptr);
+#else
+  DCHECK_IMPLIES(done(), handler_ == nullptr);
+#endif
 }
@@ -140,6 +159,19 @@ void StackFrameIterator::Reset(ThreadLocalTop* top) {
   frame_ = SingletonFor(type, &state);
 }

+#if V8_ENABLE_WEBASSEMBLY
+void StackFrameIterator::Reset(ThreadLocalTop* top, wasm::StackMemory* stack) {
+  if (stack->jmpbuf()->sp == stack->base()) {
+    // Empty stack.
+    return;
+  }
+  StackFrame::State state;
+  ReturnPromiseOnSuspendFrame::GetStateForJumpBuffer(stack->jmpbuf(), &state);
+  handler_ = StackHandler::FromAddress(Isolate::handler(top));
+  frame_ = SingletonFor(StackFrame::RETURN_PROMISE_ON_SUSPEND, &state);
+}
+#endif
+
 StackFrame* StackFrameIteratorBase::SingletonFor(StackFrame::Type type,
                                                  StackFrame::State* state) {
   StackFrame* result = SingletonFor(type);
@@ -562,7 +594,12 @@ void StackFrame::SetReturnAddressLocationResolver(
 StackFrame::Type StackFrame::ComputeType(const StackFrameIteratorBase* iterator,
                                          State* state) {
-  DCHECK_NE(state->fp, kNullAddress);
+#if V8_ENABLE_WEBASSEMBLY
+  if (state->fp == kNullAddress) {
+    DCHECK(FLAG_experimental_wasm_stack_switching);
+    return NO_FRAME_TYPE;
+  }
+#endif

   MSAN_MEMORY_IS_INITIALIZED(
       state->fp + CommonFrameConstants::kContextOrFrameTypeOffset,
@@ -792,11 +829,11 @@ StackFrame::Type ExitFrame::ComputeFrameType(Address fp) {
   StackFrame::Type frame_type = static_cast<StackFrame::Type>(marker_int >> 1);
   switch (frame_type) {
     case BUILTIN_EXIT:
-      return BUILTIN_EXIT;
 #if V8_ENABLE_WEBASSEMBLY
     case WASM_EXIT:
-      return WASM_EXIT;
+    case RETURN_PROMISE_ON_SUSPEND:
 #endif  // V8_ENABLE_WEBASSEMBLY
+      return frame_type;
     default:
       return EXIT;
   }
@@ -939,8 +976,16 @@ int CommonFrame::ComputeExpressionsCount() const {
 }

 void CommonFrame::ComputeCallerState(State* state) const {
-  state->sp = caller_sp();
   state->fp = caller_fp();
+#if V8_ENABLE_WEBASSEMBLY
+  if (state->fp == kNullAddress) {
+    // An empty FP signals the first frame of a stack segment. The caller is
+    // on a different stack, or is unbound (suspended stack).
+    DCHECK(FLAG_experimental_wasm_stack_switching);
+    return;
+  }
+#endif
+  state->sp = caller_sp();
   state->pc_address = ResolveReturnAddressLocation(
       reinterpret_cast<Address*>(ComputePCAddress(fp())));
   state->callee_fp = fp();
@@ -2099,11 +2144,8 @@ void JsToWasmFrame::Iterate(RootVisitor* v) const {

 void ReturnPromiseOnSuspendFrame::Iterate(RootVisitor* v) const {
   // See JsToWasmFrame layout.
-#ifdef DEBUG
-  Code code = GetContainingCode(isolate(), pc());
-  DCHECK(code.is_builtin() &&
-         code.builtin_id() == Builtin::kWasmReturnPromiseOnSuspend);
-#endif
+  // We cannot DCHECK that the pc matches the expected builtin code here,
+  // because the return address is on a different stack.
   // The [fp + BuiltinFrameConstants::kGCScanSlotCountOffset] on the stack is a
   // value indicating how many values should be scanned from the top.
   intptr_t scan_count = *reinterpret_cast<intptr_t*>(
@@ -2116,6 +2158,15 @@ void ReturnPromiseOnSuspendFrame::Iterate(RootVisitor* v) const {
                        spill_slot_limit);
 }

+// static
+void ReturnPromiseOnSuspendFrame::GetStateForJumpBuffer(
+    wasm::JumpBuffer* jmpbuf, State* state) {
+  DCHECK_NE(jmpbuf->fp, kNullAddress);
+  DCHECK_EQ(ComputeFrameType(jmpbuf->fp), RETURN_PROMISE_ON_SUSPEND);
+  FillState(jmpbuf->fp, jmpbuf->sp, state);
+  DCHECK_NE(*state->pc_address, kNullAddress);
+}
+
 WasmInstanceObject WasmCompileLazyFrame::wasm_instance() const {
   return WasmInstanceObject::cast(*wasm_instance_slot());
 }
...
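The frames.cc changes above hinge on one convention: a frame whose saved fp is kNullAddress is the first frame of its stack segment, so the iterator stops there instead of chasing a caller that lives on another (or currently unbound) stack. A toy standalone model of that walk, not V8 code, is:

// Standalone model of segment-bounded frame walking: each frame's saved fp
// links to its caller; a zero fp ends the segment.
#include <cassert>
#include <cstdint>

struct Frame {
  uintptr_t caller_fp;  // 0 marks the first frame of the segment
};

// Counts frames reachable from `fp` within the current segment, mirroring
// ComputeCallerState's early-out on a null fp.
int CountFramesInSegment(const Frame* fp) {
  int count = 0;
  while (fp != nullptr) {
    ++count;
    fp = reinterpret_cast<const Frame*>(fp->caller_fp);
  }
  return count;
}

int main() {
  // wrapper frame -> wasm frame, with the wrapper's caller on another stack.
  Frame wrapper{0};                                   // segment boundary
  Frame wasm{reinterpret_cast<uintptr_t>(&wrapper)};  // links to wrapper
  assert(CountFramesInSegment(&wasm) == 2);
}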
@@ -48,6 +48,8 @@ namespace v8 {
 namespace internal {
 namespace wasm {
 class WasmCode;
+struct JumpBuffer;
+class StackMemory;
 }  // namespace wasm

 // Forward declarations.
@@ -716,7 +718,8 @@ class ConstructEntryFrame : public EntryFrame {
   friend class StackFrameIteratorBase;
 };

-// Exit frames are used to exit JavaScript execution and go to C.
+// Exit frames are used to exit JavaScript execution and go to C, or to switch
+// out of the current stack for wasm stack-switching.
 class ExitFrame : public TypedFrame {
  public:
   Type type() const override { return EXIT; }
@@ -1046,11 +1049,11 @@ class JsToWasmFrame : public StubFrame {
   friend class StackFrameIteratorBase;
 };

-class ReturnPromiseOnSuspendFrame : public StubFrame {
+class ReturnPromiseOnSuspendFrame : public ExitFrame {
  public:
   Type type() const override { return RETURN_PROMISE_ON_SUSPEND; }
   void Iterate(RootVisitor* v) const override;
+  static void GetStateForJumpBuffer(wasm::JumpBuffer* jmpbuf, State* state);

  protected:
   inline explicit ReturnPromiseOnSuspendFrame(StackFrameIteratorBase* iterator);
@@ -1235,6 +1238,11 @@ class StackFrameIterator : public StackFrameIteratorBase {
   V8_EXPORT_PRIVATE explicit StackFrameIterator(Isolate* isolate);
   // An iterator that iterates over a given thread's stack.
   V8_EXPORT_PRIVATE StackFrameIterator(Isolate* isolate, ThreadLocalTop* t);
+#if V8_ENABLE_WEBASSEMBLY
+  // An iterator that iterates over a given wasm stack segment.
+  V8_EXPORT_PRIVATE StackFrameIterator(Isolate* isolate,
+                                       wasm::StackMemory* stack);
+#endif

   StackFrameIterator(const StackFrameIterator&) = delete;
   StackFrameIterator& operator=(const StackFrameIterator&) = delete;
@@ -1249,6 +1257,9 @@ class StackFrameIterator : public StackFrameIteratorBase {
  private:
   // Go back to the first frame.
   void Reset(ThreadLocalTop* top);
+#if V8_ENABLE_WEBASSEMBLY
+  void Reset(ThreadLocalTop* top, wasm::StackMemory* stack);
+#endif
 };

 // Iterator that supports iterating through all JavaScript frames.
...
@@ -568,6 +568,22 @@ void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
   // Iterate over pointers on native execution stack.
 #if V8_ENABLE_WEBASSEMBLY
   wasm::WasmCodeRefScope wasm_code_ref_scope;
+  if (FLAG_experimental_wasm_stack_switching) {
+    wasm::StackMemory* current = wasm_stacks_;
+    DCHECK_NOT_NULL(current);
+    do {
+      if (current->IsActive()) {
+        // The active stack's jump buffer does not match the current state, use
+        // the thread info below instead.
+        current = current->next();
+        continue;
+      }
+      for (StackFrameIterator it(this, current); !it.done(); it.Advance()) {
+        it.frame()->Iterate(v);
+      }
+      current = current->next();
+    } while (current != wasm_stacks_);
+  }
 #endif  // V8_ENABLE_WEBASSEMBLY
   for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
     it.frame()->Iterate(v);
@@ -4012,6 +4028,25 @@ bool Isolate::Init(SnapshotData* startup_snapshot_data,
     PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
   }

+#ifdef V8_ENABLE_WEBASSEMBLY
+  if (FLAG_experimental_wasm_stack_switching) {
+    std::unique_ptr<wasm::StackMemory> stack(
+        wasm::StackMemory::GetCurrentStackView(this));
+    this->wasm_stacks() = stack.get();
+    if (FLAG_trace_wasm_stack_switching) {
+      PrintF("Set up native stack object (limit: %p, base: %p)\n",
+             stack->jslimit(), reinterpret_cast<void*>(stack->base()));
+    }
+    HandleScope scope(this);
+    Handle<WasmContinuationObject> continuation =
+        WasmContinuationObject::New(this, std::move(stack));
+    heap()
+        ->roots_table()
+        .slot(RootIndex::kActiveContinuation)
+        .store(*continuation);
+  }
+#endif
+
   initialized_ = true;
   return true;
...
@@ -157,6 +157,10 @@ namespace metrics {
 class Recorder;
 }  // namespace metrics

+namespace wasm {
+class StackMemory;
+}
+
 #define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate) \
   do {                                                 \
     Isolate* __isolate__ = (isolate);                  \
@@ -1895,6 +1899,10 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
   SimulatorData* simulator_data() { return simulator_data_; }
 #endif

+#ifdef V8_ENABLE_WEBASSEMBLY
+  wasm::StackMemory*& wasm_stacks() { return wasm_stacks_; }
+#endif
+
  private:
   explicit Isolate(std::unique_ptr<IsolateAllocator> isolate_allocator,
                    bool is_shared);
@@ -2343,6 +2351,10 @@ class V8_EXPORT_PRIVATE Isolate final : private HiddenFactory {
   // The mutex only guards adding pages, the retrieval is signal safe.
   base::Mutex code_pages_mutex_;

+#ifdef V8_ENABLE_WEBASSEMBLY
+  wasm::StackMemory* wasm_stacks_;
+#endif
+
   // Enables the host application to provide a mechanism for recording a
   // predefined set of data as crash keys to be used in postmortem debugging
   // in case of a crash.
...
@@ -998,6 +998,8 @@ DEFINE_DEBUG_BOOL(trace_wasm_interpreter, false,
                   "trace interpretation of wasm code")
 DEFINE_DEBUG_BOOL(trace_wasm_streaming, false,
                   "trace streaming compilation of wasm code")
+DEFINE_DEBUG_BOOL(trace_wasm_stack_switching, false,
+                  "trace wasm stack switching")
 DEFINE_BOOL(liftoff, true,
             "enable Liftoff, the baseline compiler for WebAssembly")
 DEFINE_BOOL(liftoff_only, false,
...
@@ -120,6 +120,9 @@ class ConcurrentMarkingVisitor final
   int VisitWasmInstanceObject(Map map, WasmInstanceObject object) {
     return VisitJSObjectSubclass(map, object);
   }
+  int VisitWasmSuspenderObject(Map map, WasmSuspenderObject object) {
+    return VisitJSObjectSubclass(map, object);
+  }
 #endif  // V8_ENABLE_WEBASSEMBLY

   int VisitJSWeakCollection(Map map, JSWeakCollection object) {
...
@@ -722,6 +722,9 @@ RUNTIME_FUNCTION(Runtime_WasmAllocateContinuation) {
           *isolate->roots_table().slot(RootIndex::kActiveContinuation)),
       isolate);
   auto target = WasmContinuationObject::New(isolate, *parent);
+  auto target_stack =
+      Managed<wasm::StackMemory>::cast(target->stack()).get().get();
+  isolate->wasm_stacks()->Add(target_stack);
   isolate->roots_table().slot(RootIndex::kActiveContinuation).store(*target);
   SyncStackLimit(isolate);
   return *target;
...
@@ -19,57 +19,93 @@ namespace internal {
 namespace wasm {

 struct JumpBuffer {
-  void* sp;
+  Address sp;
+  Address fp;
   void* stack_limit;
   // TODO(thibaudm/fgm): Add general-purpose registers.
 };

 constexpr int kJmpBufSpOffset = offsetof(JumpBuffer, sp);
+constexpr int kJmpBufFpOffset = offsetof(JumpBuffer, fp);
 constexpr int kJmpBufStackLimitOffset = offsetof(JumpBuffer, stack_limit);

 class StackMemory {
  public:
-  static StackMemory* New() { return new StackMemory(); }
+  static StackMemory* New(Isolate* isolate) { return new StackMemory(isolate); }

   // Returns a non-owning view of the current stack.
   static StackMemory* GetCurrentStackView(Isolate* isolate) {
     byte* limit =
         reinterpret_cast<byte*>(isolate->stack_guard()->real_jslimit());
-    return new StackMemory(limit);
+    return new StackMemory(isolate, limit);
   }

   ~StackMemory() {
+    if (FLAG_trace_wasm_stack_switching) {
+      PrintF("Delete stack (sp: %p)\n", reinterpret_cast<void*>(jmpbuf_.sp));
+    }
     PageAllocator* allocator = GetPlatformPageAllocator();
     if (owned_) allocator->DecommitPages(limit_, size_);
+    // We don't need to handle removing the last stack from the list (next_ ==
+    // this). This only happens on isolate tear down, otherwise there is always
+    // at least one reachable stack (the active stack).
+    isolate_->wasm_stacks() = next_;
+    prev_->next_ = next_;
+    next_->prev_ = prev_;
   }

-  void* jslimit() { return limit_ + kJSLimitOffsetKB; }
-  void* base() { return limit_ + size_; }
+  void* jslimit() const { return limit_ + kJSLimitOffsetKB; }
+  Address base() const { return reinterpret_cast<Address>(limit_ + size_); }
   JumpBuffer* jmpbuf() { return &jmpbuf_; }

+  // Insert a stack in the linked list after this stack.
+  void Add(StackMemory* stack) {
+    stack->next_ = this->next_;
+    stack->prev_ = this;
+    this->next_->prev_ = stack;
+    this->next_ = stack;
+  }
+
+  StackMemory* next() { return next_; }
+
   // Track external memory usage for Managed<StackMemory> objects.
   size_t owned_size() { return sizeof(StackMemory) + (owned_ ? size_ : 0); }

+  bool IsActive() {
+    byte* sp = reinterpret_cast<byte*>(GetCurrentStackPosition());
+    return limit_ < sp && sp <= limit_ + size_;
+  }
+
  private:
+  static constexpr int kJSLimitOffsetKB = 40;
+
   // This constructor allocates a new stack segment.
-  StackMemory() : owned_(true) {
+  explicit StackMemory(Isolate* isolate) : isolate_(isolate), owned_(true) {
     PageAllocator* allocator = GetPlatformPageAllocator();
     int kJsStackSizeKB = 4;
     size_ = (kJsStackSizeKB + kJSLimitOffsetKB) * KB;
+    size_ = RoundUp(size_, allocator->AllocatePageSize());
     limit_ = static_cast<byte*>(
         allocator->AllocatePages(nullptr, size_, allocator->AllocatePageSize(),
                                  PageAllocator::kReadWrite));
+    if (FLAG_trace_wasm_stack_switching)
+      PrintF("Allocate stack (sp: %p, limit: %p)\n", limit_ + size_, limit_);
   }

   // Overload to represent a view of the libc stack.
-  explicit StackMemory(byte* limit) : limit_(limit), size_(0), owned_(false) {}
-
-  static constexpr int kJSLimitOffsetKB = 40;
+  StackMemory(Isolate* isolate, byte* limit)
+      : isolate_(isolate),
+        limit_(limit),
+        size_(reinterpret_cast<size_t>(limit)),
+        owned_(false) {}

+  Isolate* isolate_;
   byte* limit_;
   size_t size_;
   bool owned_;
   JumpBuffer jmpbuf_;
+  // Stacks form a circular doubly linked list per isolate.
+  StackMemory* next_ = this;
+  StackMemory* prev_ = this;
 };

 }  // namespace wasm
...
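The list bookkeeping in StackMemory above is easy to get wrong, so here is a standalone model of just the splice/unlink invariants (Add() in the class, unlink in the destructor), with hypothetical names and a couple of asserts. It is a sketch of the idea, not the V8 implementation:

// Standalone model of the circular doubly linked stack list: inserting after
// the head and self-unlinking on destruction, with the list head repointed.
#include <cassert>

struct Node {
  Node** head;        // stand-in for isolate->wasm_stacks()
  Node* next = this;
  Node* prev = this;

  explicit Node(Node** head) : head(head) {}

  void Add(Node* n) {  // mirrors StackMemory::Add
    n->next = next;
    n->prev = this;
    next->prev = n;
    next = n;
  }

  ~Node() {  // mirrors the unlink in ~StackMemory
    *head = next;
    prev->next = next;
    next->prev = prev;
  }
};

int main() {
  Node* head = nullptr;
  Node main_stack(&head);
  head = &main_stack;
  {
    Node* secondary = new Node(&head);
    main_stack.Add(secondary);
    assert(main_stack.next == secondary && secondary->next == &main_stack);
    delete secondary;  // unlinks itself, like a dying StackMemory
  }
  assert(head == &main_stack);
  assert(main_stack.next == &main_stack && main_stack.prev == &main_stack);
}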
@@ -2869,13 +2869,6 @@ void WasmJs::Install(Isolate* isolate, bool exposed_on_global_object) {
                 WasmSuspenderObject::kHeaderSize, "WebAssembly.Suspender");
     InstallFunc(isolate, suspender_proto, "returnPromiseOnSuspend",
                 WebAssemblySuspenderReturnPromiseOnSuspend, 1);
-    std::unique_ptr<wasm::StackMemory> stack(
-        wasm::StackMemory::GetCurrentStackView(isolate));
-    auto continuation = WasmContinuationObject::New(isolate, std::move(stack));
-    isolate->heap()
-        ->roots_table()
-        .slot(RootIndex::kActiveContinuation)
-        .store(*continuation);
   }

   // Setup Function
...
@@ -1749,6 +1749,7 @@ Handle<WasmContinuationObject> WasmContinuationObject::New(
       isolate->factory()->NewStruct(WASM_CONTINUATION_OBJECT_TYPE));
   stack->jmpbuf()->stack_limit = stack->jslimit();
   stack->jmpbuf()->sp = stack->base();
+  stack->jmpbuf()->fp = kNullAddress;
   result->set_jmpbuf(*isolate->factory()->NewForeign(
       reinterpret_cast<Address>(stack->jmpbuf())));
   size_t external_size = stack->owned_size();
@@ -1769,7 +1770,8 @@ Handle<WasmContinuationObject> WasmContinuationObject::New(
 // static
 Handle<WasmContinuationObject> WasmContinuationObject::New(
     Isolate* isolate, WasmContinuationObject parent) {
-  auto stack = std::unique_ptr<wasm::StackMemory>(wasm::StackMemory::New());
+  auto stack =
+      std::unique_ptr<wasm::StackMemory>(wasm::StackMemory::New(isolate));
   return New(isolate, std::move(stack), parent);
 }
...
@@ -212,9 +212,6 @@
   'regress/regress-crbug-1191886': [PASS, HEAVY],
   'wasm/externref-globals': [PASS, HEAVY],

-  # Enable when stack-switching supports GC.
-  'wasm/stack-switching': [SKIP],
-
   # TODO(v8:10915): Fails with --future.
   'harmony/weakrefs/stress-finalizationregistry-dirty-enqueue': [SKIP],
 }],  # ALWAYS
...
@@ -2,7 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

-// Flags: --allow-natives-syntax --experimental-wasm-stack-switching
+// Flags: --allow-natives-syntax --experimental-wasm-stack-switching --expose-gc

 load("test/mjsunit/wasm/wasm-module-builder.js");
@@ -26,3 +26,15 @@ load("test/mjsunit/wasm/wasm-module-builder.js");
   wrapper();
   assertEquals(42, instance.exports.g.value);
 })();
+
+(function TestStackSwitchGC() {
+  print(arguments.callee.name);
+  let builder = new WasmModuleBuilder();
+  let gc_index = builder.addImport('m', 'gc', kSig_v_v);
+  builder.addFunction("test", kSig_v_v)
+      .addBody([kExprCallFunction, gc_index]).exportFunc();
+  let instance = builder.instantiate({'m': {'gc': gc}});
+  let suspender = new WebAssembly.Suspender();
+  let wrapper = suspender.returnPromiseOnSuspend(instance.exports.test);
+  wrapper();
+})();