Commit a865d16b authored by Thibaud Michaud, committed by V8 LUCI CQ

[wasm] Resume suspender on resolved promise

Implement the WasmResume builtin, which resumes a wasm suspender
when the corresponding JS promise resolves.

Drive-by 1: Fix detection of empty stacks in the stack frame iterator.
Drive-by 2: Add a stack ID for better tracing.

R=ahaas@chromium.org
CC=fgm@chromium.org

Bug: v8:12191
Change-Id: Ifa3f00c4259f802292b04d426c739e9b551f87b9
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3420827
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Jakob Kummerow <jkummerow@chromium.org>
Commit-Queue: Thibaud Michaud <thibaudm@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78842}
parent 05b9270b
@@ -3847,15 +3847,26 @@ void Builtins::Generate_WasmReturnPromiseOnSuspend(MacroAssembler* masm) {
// -------------------------------------------
active_continuation = rbx;
__ LoadRoot(active_continuation, RootIndex::kActiveContinuation);
// Set a null pointer in the jump buffer's SP slot to indicate to the stack
// frame iterator that this stack is empty.
foreign_jmpbuf = rcx;
__ LoadAnyTaggedField(
foreign_jmpbuf,
FieldOperand(active_continuation, WasmContinuationObject::kJmpbufOffset));
jmpbuf = foreign_jmpbuf;
__ LoadExternalPointerField(
jmpbuf, FieldOperand(foreign_jmpbuf, Foreign::kForeignAddressOffset),
kForeignForeignAddressTag, rdx);
__ movq(Operand(jmpbuf, wasm::kJmpBufSpOffset), Immediate(kNullAddress));
Register parent = rdx;
__ LoadAnyTaggedField(
parent,
FieldOperand(active_continuation, WasmContinuationObject::kParentOffset));
active_continuation = no_reg;
// live: [rsi, rdx]
// -------------------------------------------
// Update active continuation.
// -------------------------------------------
__ movq(masm->RootAsOperand(RootIndex::kActiveContinuation), parent);
foreign_jmpbuf = rax;
@@ -4019,10 +4030,122 @@ void Builtins::Generate_WasmSuspend(MacroAssembler* masm) {
__ ret(0);
}
// Resume the suspender stored in the closure.
void Builtins::Generate_WasmResume(MacroAssembler* masm) {
__ EnterFrame(StackFrame::STACK_SWITCH);
Register param_count = rax;
Register closure = kJSFunctionRegister; // rdi
__ subq(rsp, Immediate(ReturnPromiseOnSuspendFrameConstants::kSpillAreaSize));
// This slot is not used in this builtin. But when we return from the resumed
// continuation, we return to the ReturnPromiseOnSuspend code, which expects
// this slot to be set.
__ movq(
MemOperand(rbp, ReturnPromiseOnSuspendFrameConstants::kParamCountOffset),
param_count);
param_count = no_reg;
// -------------------------------------------
// Load suspender from closure.
// -------------------------------------------
Register sfi = closure;
__ LoadAnyTaggedField(
sfi,
MemOperand(
closure,
wasm::ObjectAccess::SharedFunctionInfoOffsetInTaggedJSFunction()));
Register function_data = sfi;
__ LoadAnyTaggedField(
function_data,
FieldOperand(sfi, SharedFunctionInfo::kFunctionDataOffset));
Register suspender = rax;
__ LoadAnyTaggedField(
suspender,
FieldOperand(function_data, WasmOnFulfilledData::kSuspenderOffset));
// Check the suspender state.
Label suspender_is_suspended;
Register state = rdx;
__ LoadTaggedSignedField(
state, FieldOperand(suspender, WasmSuspenderObject::kStateOffset));
__ SmiCompare(state, Smi::FromInt(WasmSuspenderObject::Suspended));
__ j(equal, &suspender_is_suspended);
__ Trap(); // TODO(thibaudm): Throw a wasm trap.
closure = no_reg;
sfi = no_reg;
__ bind(&suspender_is_suspended);
// -------------------------------------------
// Save current state.
// -------------------------------------------
Label suspend;
Register active_continuation = r9;
__ LoadRoot(active_continuation, RootIndex::kActiveContinuation);
Register current_jmpbuf = rdi;
__ LoadAnyTaggedField(
current_jmpbuf,
FieldOperand(active_continuation, WasmContinuationObject::kJmpbufOffset));
__ LoadExternalPointerField(
current_jmpbuf,
FieldOperand(current_jmpbuf, Foreign::kForeignAddressOffset),
kForeignForeignAddressTag, rdx);
FillJumpBuffer(masm, current_jmpbuf, &suspend);
current_jmpbuf = no_reg;
// -------------------------------------------
// Set suspender's parent to active continuation.
// -------------------------------------------
__ StoreTaggedSignedField(
FieldOperand(suspender, WasmSuspenderObject::kStateOffset),
Smi::FromInt(WasmSuspenderObject::Active));
Register target_continuation = rdi;
__ LoadAnyTaggedField(
target_continuation,
FieldOperand(suspender, WasmSuspenderObject::kContinuationOffset));
Register slot_address = WriteBarrierDescriptor::SlotAddressRegister();
__ StoreTaggedField(
FieldOperand(target_continuation, WasmContinuationObject::kParentOffset),
active_continuation);
__ RecordWriteField(
target_continuation, WasmContinuationObject::kParentOffset,
active_continuation, slot_address, SaveFPRegsMode::kIgnore);
active_continuation = no_reg;
// -------------------------------------------
// Update roots.
// -------------------------------------------
__ movq(masm->RootAsOperand(RootIndex::kActiveContinuation),
target_continuation);
__ movq(masm->RootAsOperand(RootIndex::kActiveSuspender), suspender);
suspender = no_reg;
MemOperand GCScanSlotPlace =
MemOperand(rbp, BuiltinWasmWrapperConstants::kGCScanSlotCountOffset);
__ Move(GCScanSlotPlace, 1);
__ Push(target_continuation);
__ Move(kContextRegister, Smi::zero());
__ CallRuntime(Runtime::kWasmSyncStackLimit);
__ Pop(target_continuation);
// -------------------------------------------
// Load state from target jmpbuf (longjmp).
// -------------------------------------------
Register target_jmpbuf = target_continuation;
__ LoadAnyTaggedField(
target_jmpbuf,
FieldOperand(target_continuation, WasmContinuationObject::kJmpbufOffset));
__ LoadExternalPointerField(
target_jmpbuf,
FieldOperand(target_jmpbuf, Foreign::kForeignAddressOffset),
kForeignForeignAddressTag, rax);
// Move resolved value to return register.
__ movq(kReturnRegister0, Operand(rbp, 3 * kSystemPointerSize));
__ Move(GCScanSlotPlace, 0);
LoadJumpBuffer(masm, target_jmpbuf, true);
__ Trap();
__ bind(&suspend);
__ LeaveFrame(StackFrame::STACK_SWITCH);
__ ret(3);
}
void Builtins::Generate_WasmOnStackReplace(MacroAssembler* masm) {
...
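For orientation, here is a minimal, self-contained C++ sketch of the control flow that Generate_WasmResume above implements. Every type, global and function name below (Suspender, Continuation, JumpBuffer, Resume) is a simplified stand-in invented for illustration, not a V8 internal class, and the actual longjmp-style stack switch is only modeled as bookkeeping.

// Simplified model of the control flow in Generate_WasmResume. Every type,
// name and global below is an illustrative stand-in, not a V8 internal.
#include <cassert>
#include <cstdint>

enum class SuspenderState { kActive, kSuspended };

struct JumpBuffer {
  uintptr_t sp = 0;  // 0 (null) marks a returned/empty stack.
  uintptr_t stack_limit = 0;
};

struct Continuation {
  JumpBuffer jmpbuf;
  Continuation* parent = nullptr;
};

struct Suspender {
  SuspenderState state = SuspenderState::kSuspended;
  Continuation* continuation = nullptr;
};

// Stand-ins for the ActiveContinuation / ActiveSuspender roots.
Continuation* active_continuation = nullptr;
Suspender* active_suspender = nullptr;

// Mirrors the steps of the builtin: check the suspender state, save the
// current stack, re-link the parent, update the roots, then switch stacks.
void Resume(Suspender* suspender, uintptr_t current_sp) {
  // 1. The suspender must be suspended; the builtin traps otherwise.
  assert(suspender->state == SuspenderState::kSuspended);

  // 2. Save the current stack pointer into the active continuation's jump
  //    buffer (the builtin saves the full state via FillJumpBuffer), so the
  //    resumed computation can suspend or return back to this point.
  active_continuation->jmpbuf.sp = current_sp;

  // 3. Mark the suspender active and make the current continuation the
  //    parent of the continuation that is about to run.
  suspender->state = SuspenderState::kActive;
  Continuation* target = suspender->continuation;
  target->parent = active_continuation;

  // 4. Update the roots: the target continuation and its suspender become
  //    the active ones.
  active_continuation = target;
  active_suspender = suspender;

  // 5. The builtin then loads the target jump buffer (a longjmp-style stack
  //    switch) and places the promise's resolved value in the return
  //    register; only the bookkeeping is modeled here.
}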
@@ -162,8 +162,9 @@ void StackFrameIterator::Reset(ThreadLocalTop* top) {
#if V8_ENABLE_WEBASSEMBLY
void StackFrameIterator::Reset(ThreadLocalTop* top, wasm::StackMemory* stack) {
if (stack->jmpbuf()->sp == kNullAddress) {
// A null SP indicates that the computation associated with this stack has
// returned, leaving the stack segment empty.
return;
}
StackFrame::State state;
...
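The drive-by fix above and the new code at the top of the builtins diff form one convention: a stack whose computation has returned gets a null SP written into its jump buffer, and the frame iterator treats that null SP, rather than sp == base, as "this stack is empty". Below is a small sketch of both sides of that convention, using simplified stand-in types and helper names rather than V8's.

// Both sides of the empty-stack convention; the types and helpers here are
// simplified stand-ins, not V8's.
#include <cstdint>

constexpr uintptr_t kNullAddress = 0;

struct JumpBuffer {
  uintptr_t sp = kNullAddress;
};

struct StackSegment {
  JumpBuffer jmpbuf;
  uintptr_t base = 0;  // highest address of the segment
};

// Producer side (the new code in Generate_WasmReturnPromiseOnSuspend): when
// the computation on this stack returns, null out the saved SP so the
// segment is unambiguously marked as empty.
void MarkReturned(StackSegment* stack) {
  stack->jmpbuf.sp = kNullAddress;
}

// Consumer side (StackFrameIterator::Reset): a null SP means there are no
// frames to visit. The previous check, sp == base, relied on the saved SP
// still pointing exactly at the base of an empty stack.
bool HasFramesToIterate(const StackSegment& stack) {
  return stack.jmpbuf.sp != kNullAddress;
}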
@@ -731,6 +731,9 @@ void SyncStackLimit(Isolate* isolate) {
auto continuation = WasmContinuationObject::cast(
*isolate->roots_table().slot(RootIndex::kActiveContinuation));
auto stack = Managed<wasm::StackMemory>::cast(continuation.stack()).get();
if (FLAG_trace_wasm_stack_switching) {
PrintF("Switch to stack #%d\n", stack->id());
}
uintptr_t limit = reinterpret_cast<uintptr_t>(stack->jmpbuf()->stack_limit);
isolate->stack_guard()->SetStackLimit(limit);
}
...
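SyncStackLimit above re-installs the stack limit of the newly active stack and, with the new per-stack IDs, traces which stack is being switched to. The sketch below models only that bookkeeping under simplified assumptions; StackGuard, StackMemory and the trace flag are stand-ins, not V8's types.

// Bookkeeping performed when switching stacks; StackGuard, StackMemory and
// the trace flag are simplified stand-ins, not V8's types.
#include <cstdint>
#include <cstdio>

struct JumpBuffer {
  uintptr_t stack_limit = 0;
};

struct StackMemory {
  int id = 0;
  JumpBuffer jmpbuf;
};

struct StackGuard {
  uintptr_t limit = 0;
  void SetStackLimit(uintptr_t new_limit) { limit = new_limit; }
};

bool trace_stack_switching = true;  // stands in for --trace-wasm-stack-switching

// After the active continuation changes, stack-overflow checks must use the
// limit of the stack that was just switched to. The per-stack id makes the
// "Switch to stack #N" trace line easy to correlate with the allocation and
// deletion traces added in stacks.h.
void SyncStackLimit(StackGuard* guard, const StackMemory* active_stack) {
  if (trace_stack_switching) {
    std::printf("Switch to stack #%d\n", active_stack->id);
  }
  guard->SetStackLimit(active_stack->jmpbuf.stack_limit);
}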
@@ -44,7 +44,7 @@ class StackMemory {
~StackMemory() {
if (FLAG_trace_wasm_stack_switching) {
PrintF("Delete stack #%d\n", id_);
}
PageAllocator* allocator = GetPlatformPageAllocator();
if (owned_) allocator->DecommitPages(limit_, size_);
@@ -59,6 +59,7 @@ class StackMemory {
void* jslimit() const { return limit_ + kJSLimitOffsetKB; }
Address base() const { return reinterpret_cast<Address>(limit_ + size_); }
JumpBuffer* jmpbuf() { return &jmpbuf_; }
int id() { return id_; }
// Insert a stack in the linked list after this stack.
void Add(StackMemory* stack) {
@@ -82,6 +83,8 @@ class StackMemory {
// This constructor allocates a new stack segment.
explicit StackMemory(Isolate* isolate) : isolate_(isolate), owned_(true) {
static int next_id = 1;
id_ = next_id++;
PageAllocator* allocator = GetPlatformPageAllocator();
int kJsStackSizeKB = 4;
size_ = (kJsStackSizeKB + kJSLimitOffsetKB) * KB;
@@ -89,8 +92,9 @@ class StackMemory {
limit_ = static_cast<byte*>(
allocator->AllocatePages(nullptr, size_, allocator->AllocatePageSize(),
PageAllocator::kReadWrite));
if (FLAG_trace_wasm_stack_switching) {
PrintF("Allocate stack #%d\n", id_);
}
}
// Overload to represent a view of the libc stack.
@@ -98,13 +102,16 @@ class StackMemory {
: isolate_(isolate),
limit_(limit),
size_(reinterpret_cast<size_t>(limit)),
owned_(false) {
id_ = 0;
}
Isolate* isolate_;
byte* limit_;
size_t size_;
bool owned_;
JumpBuffer jmpbuf_;
int id_;
// Stacks form a circular doubly linked list per isolate.
StackMemory* next_ = this;
StackMemory* prev_ = this;
...
@@ -48,6 +48,49 @@ load("test/mjsunit/wasm/wasm-module-builder.js");
let wrapped_export = suspender.returnPromiseOnSuspend(instance.exports.test);
let combined_promise = wrapped_export();
assertEquals(0, instance.exports.g.value);
combined_promise.then(_ => assertEquals(42, instance.exports.g.value));
})();
// Check that we can suspend back out of a resumed computation.
(function TestStackSwitchSuspendLoop() {
print(arguments.callee.name);
let builder = new WasmModuleBuilder();
builder.addGlobal(kWasmI32, true).exportAs('g');
import_index = builder.addImport('m', 'import', kSig_i_v);
// Pseudo-code for the wasm function:
// for (i = 0; i < 5; ++i) {
// g = g + import();
// }
builder.addFunction("test", kSig_v_v)
.addLocals(kWasmI32, 1)
.addBody([
kExprI32Const, 5,
kExprLocalSet, 0,
kExprLoop, kWasmVoid,
kExprCallFunction, import_index, // suspend
kExprGlobalGet, 0, // resume
kExprI32Add,
kExprGlobalSet, 0,
kExprLocalGet, 0,
kExprI32Const, 1,
kExprI32Sub,
kExprLocalTee, 0,
kExprBrIf, 0,
kExprEnd,
]).exportFunc();
let suspender = new WebAssembly.Suspender();
let i = 0;
// The n-th call to the import returns a promise that resolves to n.
function js_import() {
return new Promise((resolve) => { resolve(++i); });
};
let wasm_js_import = new WebAssembly.Function({parameters: [], results: ['i32']}, js_import);
let suspending_wasm_js_import = suspender.suspendOnReturnedPromise(wasm_js_import);
let instance = builder.instantiate({m: {import: suspending_wasm_js_import}});
let wrapped_export = suspender.returnPromiseOnSuspend(instance.exports.test);
let chained_promise = wrapped_export();
assertEquals(0, instance.exports.g.value);
chained_promise.then(_ => assertEquals(15, instance.exports.g.value));
})();
(function TestStackSwitchGC() {
...