Commit 8a1e9457 authored by Andreas Haas, committed by Commit Bot

[wasm][liftoff] Define safepoints for stack checks

Safepoints encode which slots in a stack frame store references when a
function is called. Safepoints for normal function calls in Liftoff were
already implemented before. With this CL, a safepoint for the runtime
call in a stack check is emitted.

R=thibaudm@chromium.org, clemensb@chromium.org

Bug: v8:7581
Change-Id: Iacb8b15559502adb7622935edb0cfa7ca03d634e
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2563266
Commit-Queue: Andreas Haas <ahaas@chromium.org>
Reviewed-by: Thibaud Michaud <thibaudm@chromium.org>
Reviewed-by: Clemens Backes <clemensb@chromium.org>
Cr-Commit-Position: refs/heads/master@{#71494}
parent a1fc79be
......@@ -751,6 +751,18 @@ RUNTIME_FUNCTION(Runtime_SimulateNewspaceFull) {
return ReadOnlyRoots(isolate).undefined_value();
}
RUNTIME_FUNCTION(Runtime_ScheduleGCInStackCheck) {
  SealHandleScope shs(isolate);
  DCHECK_EQ(0, args.length());
  // Schedule an interrupt whose handler requests a full garbage collection.
  // Tests use this to force a GC the next time the interrupt is serviced
  // (e.g. at a stack check).
  auto request_gc_callback = [](v8::Isolate* isolate, void*) {
    isolate->RequestGarbageCollectionForTesting(
        v8::Isolate::kFullGarbageCollection);
  };
  isolate->RequestInterrupt(request_gc_callback, nullptr);
  return ReadOnlyRoots(isolate).undefined_value();
}
static void DebugPrintImpl(MaybeObject maybe_object) {
StdoutStream os;
if (maybe_object->IsCleared()) {
......
......@@ -540,6 +540,7 @@ namespace internal {
F(SetWasmInstantiateControls, 0, 1) \
F(SetWasmThreadsEnabled, 1, 1) \
F(SimulateNewspaceFull, 0, 1) \
F(ScheduleGCInStackCheck, 0, 1) \
F(StringIteratorProtector, 0, 1) \
F(SystemBreak, 0, 1) \
F(TraceEnter, 0, 1) \
......
......@@ -489,21 +489,45 @@ void LiftoffAssembler::CacheState::Split(const CacheState& source) {
*this = source;
}
namespace {
// Translates a value-stack slot into the index used by the safepoint table.
int GetSafepointIndexForStackSlot(const VarState& slot) {
  // Safepoint index 0 corresponds to the stack slot at
  // 'fp + kFixedFrameSizeAboveFp - kSystemPointerSize', while the slot itself
  // lives at 'fp - slot.offset()'. The byte distance between those two
  // addresses is therefore
  // 'slot.offset() + kFixedFrameSizeAboveFp - kSystemPointerSize', which we
  // divide by the pointer size to get an index. Concretely, the first stack
  // slot ends up at index 4.
  int byte_offset = slot.offset() +
                    StandardFrameConstants::kFixedFrameSizeAboveFp -
                    kSystemPointerSize;
  return byte_offset / kSystemPointerSize;
}
}  // namespace
void LiftoffAssembler::CacheState::GetTaggedSlotsForOOLCode(
    ZoneVector<int>* slots, LiftoffRegList* spills,
    SpillLocation spill_location) {
  // Collect every reference-typed value on the value stack. Values held in
  // registers are handled according to {spill_location}: with {kTopOfStack}
  // they will be pushed just before the runtime call, with {kStackSlots} they
  // will be written back to their regular stack slots.
  for (const VarState& slot : stack_state) {
    if (!slot.type().is_reference_type()) continue;
    if (slot.is_reg() && spill_location == SpillLocation::kTopOfStack) {
      // The register gets spilled just before the call to the runtime. Record
      // in {spills} that it holds a reference, so that the corresponding
      // spill slot can be added to the safepoint.
      spills->set(slot.reg());
    } else {
      DCHECK_IMPLIES(slot.is_reg(),
                     spill_location == SpillLocation::kStackSlots);
      slots->push_back(GetSafepointIndexForStackSlot(slot));
    }
  }
}
void LiftoffAssembler::CacheState::DefineSafepoint(Safepoint& safepoint) {
for (auto slot : stack_state) {
for (const auto& slot : stack_state) {
DCHECK(!slot.is_reg());
if (slot.type().is_reference_type()) {
// index = 0 is for the stack slot at 'fp + kFixedFrameSizeAboveFp -
// kSystemPointerSize', the location of the current stack slot is 'fp -
// slot.offset()'. The index we need is therefore '(fp +
// kFixedFrameSizeAboveFp - kSystemPointerSize) - (fp - slot.offset())' =
// 'slot.offset() + kFixedFrameSizeAboveFp - kSystemPointerSize'.
auto index =
(slot.offset() + StandardFrameConstants::kFixedFrameSizeAboveFp -
kSystemPointerSize) /
kSystemPointerSize;
safepoint.DefinePointerSlot(index);
safepoint.DefinePointerSlot(GetSafepointIndexForStackSlot(slot));
}
}
}
......@@ -514,7 +538,8 @@ int LiftoffAssembler::GetTotalFrameSlotCountForGC() const {
// that the offset of the first spill slot is kSystemPointerSize and not
// '0'. Therefore we don't have to add '+1' here.
return (max_used_spill_offset_ +
StandardFrameConstants::kFixedFrameSizeAboveFp) /
StandardFrameConstants::kFixedFrameSizeAboveFp +
ool_spill_space_size_) /
kSystemPointerSize;
}
......
......@@ -173,6 +173,18 @@ class LiftoffAssembler : public TurboAssembler {
// Disallow copy construction.
CacheState(const CacheState&) = delete;
enum class SpillLocation { kTopOfStack, kStackSlots };
// Generates two lists of locations that contain references. {slots}
// contains the indices of slots on the value stack that contain references.
// {spills} contains all registers that contain references. The
// {spill_location} defines where register values will be spilled for a
// function call within the out-of-line code. {kStackSlots} means that the
// values in the registers will be written back to their stack slots.
// {kTopOfStack} means that the registers will be spilled on the stack with
// a {push} instruction.
void GetTaggedSlotsForOOLCode(/*out*/ ZoneVector<int>* slots,
/*out*/ LiftoffRegList* spills,
SpillLocation spill_location);
void DefineSafepoint(Safepoint& safepoint);
base::SmallVector<VarState, 8> stack_state;
......@@ -452,6 +464,10 @@ class LiftoffAssembler : public TurboAssembler {
if (offset >= max_used_spill_offset_) max_used_spill_offset_ = offset;
}
// Records the spill-area size (in bytes) needed by one out-of-line code
// sequence; keeps the maximum over all recorded sizes.
void RecordOolSpillSpaceSize(int size) {
  if (ool_spill_space_size_ < size) ool_spill_space_size_ = size;
}
// Load parameters into the right registers / stack slots for the call.
void PrepareBuiltinCall(const FunctionSig* sig,
compiler::CallDescriptor* call_descriptor,
......@@ -1225,7 +1241,10 @@ class LiftoffAssembler : public TurboAssembler {
static_assert(sizeof(ValueType) == 4,
"Reconsider this inlining if ValueType gets bigger");
CacheState cache_state_;
// The maximum spill offset for slots in the value stack.
int max_used_spill_offset_ = StaticStackFrameSize();
// The amount of memory needed for register spills in OOL code.
int ool_spill_space_size_ = 0;
LiftoffBailoutReason bailout_reason_ = kSuccess;
const char* bailout_detail_ = nullptr;
......
......@@ -301,38 +301,60 @@ class LiftoffCompiler {
explicit SpilledRegistersForInspection(Zone* zone) : entries(zone) {}
};
// Locations of tagged (reference) values that must be reported to the GC at
// the safepoint of a runtime call inside out-of-line code.
struct OutOfLineSafepointInfo {
  // Safepoint indices of value-stack slots that contain references.
  ZoneVector<int> slots;
  // Registers that contain references; they are spilled just before the
  // runtime call, and their spill slots are added to the safepoint.
  LiftoffRegList spills;
  explicit OutOfLineSafepointInfo(Zone* zone) : slots(zone) {}
};
struct OutOfLineCode {
MovableLabel label;
MovableLabel continuation;
WasmCode::RuntimeStubId stub;
WasmCodePosition position;
LiftoffRegList regs_to_save;
OutOfLineSafepointInfo* safepoint_info;
uint32_t pc; // for trap handler.
// These two pointers will only be used for debug code:
DebugSideTableBuilder::EntryBuilder* debug_sidetable_entry_builder;
SpilledRegistersForInspection* spilled_registers;
DebugSideTableBuilder::EntryBuilder* debug_sidetable_entry_builder;
// Named constructors:
static OutOfLineCode Trap(
WasmCode::RuntimeStubId s, WasmCodePosition pos, uint32_t pc,
DebugSideTableBuilder::EntryBuilder* debug_sidetable_entry_builder,
SpilledRegistersForInspection* spilled_registers) {
WasmCode::RuntimeStubId s, WasmCodePosition pos,
SpilledRegistersForInspection* spilled_registers,
OutOfLineSafepointInfo* safepoint_info, uint32_t pc,
DebugSideTableBuilder::EntryBuilder* debug_sidetable_entry_builder) {
DCHECK_LT(0, pos);
return {{},
{},
s,
pos,
{},
pc,
debug_sidetable_entry_builder,
spilled_registers};
return {
{}, // label
{}, // continuation
s, // stub
pos, // position
{}, // regs_to_save
safepoint_info, // safepoint_info
pc, // pc
spilled_registers, // spilled_registers
debug_sidetable_entry_builder // debug_side_table_entry_builder
};
}
static OutOfLineCode StackCheck(
WasmCodePosition pos, LiftoffRegList regs_to_save,
SpilledRegistersForInspection* spilled_regs,
OutOfLineSafepointInfo* safepoint_info,
DebugSideTableBuilder::EntryBuilder* debug_sidetable_entry_builder) {
return {{}, {}, WasmCode::kWasmStackGuard, pos,
regs_to_save, 0, debug_sidetable_entry_builder, spilled_regs};
return {
{}, // label
{}, // continuation
WasmCode::kWasmStackGuard, // stub
pos, // position
regs_to_save, // regs_to_save
safepoint_info, // safepoint_info
0, // pc
spilled_regs, // spilled_registers
debug_sidetable_entry_builder // debug_side_table_entry_builder
};
}
};
......@@ -437,10 +459,6 @@ class LiftoffCompiler {
return false;
}
// Returns the code offset at which the safepoint table starts, as recorded
// by the safepoint table builder.
int GetSafepointTableOffset() const {
return safepoint_table_builder_.GetCodeOffset();
}
void UnuseLabels(FullDecoder* decoder) {
#ifdef DEBUG
auto Unuse = [](Label* label) {
......@@ -510,12 +528,20 @@ class LiftoffCompiler {
LiftoffRegList regs_to_save = __ cache_state()->used_registers;
SpilledRegistersForInspection* spilled_regs = nullptr;
Register limit_address = __ GetUnusedRegister(kGpReg, {}).gp();
OutOfLineSafepointInfo* safepoint_info =
compilation_zone_->New<OutOfLineSafepointInfo>(compilation_zone_);
__ cache_state()->GetTaggedSlotsForOOLCode(
&safepoint_info->slots, &safepoint_info->spills,
for_debugging_
? LiftoffAssembler::CacheState::SpillLocation::kStackSlots
: LiftoffAssembler::CacheState::SpillLocation::kTopOfStack);
if (V8_UNLIKELY(for_debugging_)) {
regs_to_save = {};
spilled_regs = GetSpilledRegistersForInspection();
}
out_of_line_code_.push_back(OutOfLineCode::StackCheck(
position, regs_to_save, spilled_regs,
position, regs_to_save, spilled_regs, safepoint_info,
RegisterDebugSideTableEntry(DebugSideTableBuilder::kAssumeSpilling)));
OutOfLineCode& ool = out_of_line_code_.back();
LOAD_INSTANCE_FIELD(limit_address, StackLimitAddress, kSystemPointerSize);
......@@ -734,8 +760,38 @@ class LiftoffCompiler {
source_position_table_builder_.AddPosition(
__ pc_offset(), SourcePosition(ool->position), true);
__ CallRuntimeStub(ool->stub);
// TODO(ahaas): Define a proper safepoint here.
safepoint_table_builder_.DefineSafepoint(&asm_, Safepoint::kNoLazyDeopt);
Safepoint safepoint = safepoint_table_builder_.DefineSafepoint(
&asm_, Safepoint::kNoLazyDeopt);
if (ool->safepoint_info) {
for (auto index : ool->safepoint_info->slots) {
safepoint.DefinePointerSlot(index);
}
int total_frame_size = __ GetTotalFrameSize();
LiftoffRegList gp_regs = ool->regs_to_save & kGpCacheRegList;
// {total_frame_size} is the highest offset from the FP that is used to
// store a value. The offset of the first spill slot should therefore be
// {(total_frame_size / kSystemPointerSize) + 1}. However, spill slots
// don't start at offset '0' but at offset '-1' (or
// {-kSystemPointerSize}). Therefore we have to add another '+ 1' to the
// index of the first spill slot.
int index = (total_frame_size / kSystemPointerSize) + 2;
// The size of the stack frame in addition to {total_frame_size} that may
// contain references.
int spill_space_size = 0;
while (!gp_regs.is_empty()) {
LiftoffRegister reg = gp_regs.GetFirstRegSet();
if (ool->safepoint_info->spills.has(reg)) {
safepoint.DefinePointerSlot(index);
}
gp_regs.clear(reg);
++index;
spill_space_size += kSystemPointerSize;
}
__ RecordOolSpillSpaceSize(spill_space_size);
}
DCHECK_EQ(!debug_sidetable_builder_, !ool->debug_sidetable_entry_builder);
if (V8_UNLIKELY(ool->debug_sidetable_entry_builder)) {
ool->debug_sidetable_entry_builder->set_pc_offset(__ pc_offset());
......@@ -2075,12 +2131,24 @@ class LiftoffCompiler {
Label* AddOutOfLineTrap(WasmCodePosition position,
WasmCode::RuntimeStubId stub, uint32_t pc = 0) {
DCHECK(FLAG_wasm_bounds_checks);
OutOfLineSafepointInfo* safepoint_info = nullptr;
if (V8_UNLIKELY(for_debugging_)) {
// Execution does not return after a trap. Therefore we don't have to
// define a safepoint for traps that would preserve references on the
// stack. However, if this is debug code, then we have to preserve the
// references so that they can be inspected.
safepoint_info =
compilation_zone_->New<OutOfLineSafepointInfo>(compilation_zone_);
__ cache_state()->GetTaggedSlotsForOOLCode(
&safepoint_info->slots, &safepoint_info->spills,
LiftoffAssembler::CacheState::SpillLocation::kStackSlots);
}
out_of_line_code_.push_back(OutOfLineCode::Trap(
stub, position, pc,
RegisterDebugSideTableEntry(DebugSideTableBuilder::kAssumeSpilling),
stub, position,
V8_UNLIKELY(for_debugging_) ? GetSpilledRegistersForInspection()
: nullptr));
: nullptr,
safepoint_info, pc,
RegisterDebugSideTableEntry(DebugSideTableBuilder::kAssumeSpilling)));
return out_of_line_code_.back().label.get();
}
......
......@@ -42,8 +42,8 @@
# Open bugs.
# https://crbug.com/v8/10929
'wasm/externref-liftoff' : [SKIP],
'wasm/externref-globals-liftoff' : [SKIP],
'wasm/externref-liftoff' : [PASS, ['arch == arm64', SKIP]],
'wasm/externref-globals-liftoff' : [PASS, ['arch == arm64', SKIP]],
# BUG(v8:2989).
'regress/regress-2989': [FAIL, NO_VARIANTS, ['lite_mode == True', SKIP]],
......
......@@ -4,5 +4,6 @@
// Flags: --expose-wasm --experimental-wasm-reftypes --expose-gc --liftoff
// Flags: --no-wasm-tier-up --experimental-liftoff-extern-ref
// Flags: --allow-natives-syntax
load("test/mjsunit/wasm/externref.js");
......@@ -3,6 +3,7 @@
// found in the LICENSE file.
// Flags: --expose-wasm --experimental-wasm-reftypes --expose-gc
// Flags: --allow-natives-syntax
load("test/mjsunit/wasm/wasm-module-builder.js");
......@@ -13,7 +14,6 @@ load("test/mjsunit/wasm/wasm-module-builder.js");
.addBody([kExprLocalGet, 0])
.exportFunc();
const instance = builder.instantiate();
let obj = {'hello' : 'world'};
......@@ -240,3 +240,33 @@ load("test/mjsunit/wasm/wasm-module-builder.js");
const main = builder.instantiate().exports.main;
assertEquals(null, main());
})();
(function testGCInStackCheck() {
print(arguments.callee.name);
const builder = new WasmModuleBuilder();
const gc_sig = builder.addType(kSig_v_v);
const func_sig = builder.addType(kSig_v_r);
const triggerGC_index = builder.addImport('q', 'triggerGC', gc_sig);
const func_index = builder.addImport('q', 'func', func_sig);
const foo = builder.addFunction('foo', kSig_v_r).addBody([
kExprLocalGet, 0, kExprCallFunction, func_index
]);
builder.addFunction('main', kSig_v_r)
.addBody([
kExprCallFunction, triggerGC_index, kExprLocalGet, 0, kExprCallFunction,
foo.index
])
.exportFunc();
const instance = builder.instantiate({
q: {
triggerGC: () => %ScheduleGCInStackCheck(),
func: (ref) => assertEquals(ref.hello, 4)
}
});
instance.exports.main({hello: 4});
})();
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment