Commit 0027218e authored by eholk, committed by Commit bot

[wasm] Trap handling: ProtectedLoad instruction

This CL introduces a ProtectedLoad instruction, which is needed for
out-of-bounds trap handling. ProtectedLoad behaves like a regular load,
but it takes a context and a source position parameter as well. These
are used by an out-of-line code fragment to generate code that throws a
JS exception for an out-of-bounds memory reference in Wasm.
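
As a rough illustration, here is a condensed sketch of the wasm-compiler.cc
change further down (not verbatim CL code): depending on the new
FLAG_wasm_trap_handler flag, the graph builder emits either a plain Load behind
an explicit bounds check or a ProtectedLoad carrying the extra inputs (in the
hunk, `context` is HeapConstant(module_->instance->context) and `position_node`
is Int32Constant(position)).

  // Explicit-bounds-check path: a regular load of (base, index).
  load = graph()->NewNode(jsgraph()->machine()->Load(memtype),
                          MemBuffer(offset), index, *effect_, *control_);

  // Trap-handler path: no explicit bounds check; the context and source
  // position ride along on the node so the out-of-line trap code can throw
  // the JS exception at the right location.
  load = graph()->NewNode(jsgraph()->machine()->ProtectedLoad(memtype),
                          MemBuffer(offset), index, context, position_node,
                          *effect_, *control_);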

These changes are a cleaned-up subset of
https://codereview.chromium.org/2148743004/

The rest of this feature will follow in future CLs. This includes a
table mapping memory instructions to landing pads as well as the
actual signal handler.

BUG= https://bugs.chromium.org/p/v8/issues/detail?id=5277

Review-Url: https://codereview.chromium.org/2301833004
Cr-Commit-Position: refs/heads/master@{#39318}
parent ca7eaa30
@@ -412,6 +412,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  EmitLoad(this, opcode, &output, base, index);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  ArmOperandGenerator g(this);
...
@@ -593,6 +593,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  EmitLoad(this, node, opcode, immediate_mode, rep);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  Arm64OperandGenerator g(this);
...
@@ -250,6 +250,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  Emit(code, 1, outputs, input_count, inputs);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  IA32OperandGenerator g(this);
...
@@ -1291,6 +1291,8 @@ void InstructionSelector::VisitNode(Node* node) {
    }
    case IrOpcode::kAtomicStore:
      return VisitAtomicStore(node);
+   case IrOpcode::kProtectedLoad:
+     return VisitProtectedLoad(node);
    case IrOpcode::kUnsafePointerAdd:
      MarkAsRepresentation(MachineType::PointerRepresentation(), node);
      return VisitUnsafePointerAdd(node);
...
@@ -36,6 +36,7 @@ std::ostream& operator<<(std::ostream& os, StoreRepresentation rep) {
LoadRepresentation LoadRepresentationOf(Operator const* op) {
  DCHECK(IrOpcode::kLoad == op->opcode() ||
+        IrOpcode::kProtectedLoad == op->opcode() ||
         IrOpcode::kAtomicLoad == op->opcode());
  return OpParameter<LoadRepresentation>(op);
}
@@ -500,9 +501,18 @@ struct MachineOperatorGlobalCache {
            Operator::kNoDeopt | Operator::kNoThrow | Operator::kNoWrite, \
            "CheckedLoad", 3, 1, 1, 1, 1, 0, MachineType::Type()) {} \
  }; \
+ struct ProtectedLoad##Type##Operator final \
+     : public Operator1<ProtectedLoadRepresentation> { \
+   ProtectedLoad##Type##Operator() \
+       : Operator1<ProtectedLoadRepresentation>( \
+             IrOpcode::kProtectedLoad, \
+             Operator::kNoDeopt | Operator::kNoThrow | Operator::kNoWrite, \
+             "ProtectedLoad", 4, 1, 1, 1, 1, 0, MachineType::Type()) {} \
+ }; \
  Load##Type##Operator kLoad##Type; \
  UnalignedLoad##Type##Operator kUnalignedLoad##Type; \
- CheckedLoad##Type##Operator kCheckedLoad##Type;
+ CheckedLoad##Type##Operator kCheckedLoad##Type; \
+ ProtectedLoad##Type##Operator kProtectedLoad##Type;
  MACHINE_TYPE_LIST(LOAD)
#undef LOAD
@@ -697,6 +707,17 @@ const Operator* MachineOperatorBuilder::Load(LoadRepresentation rep) {
  return nullptr;
}

+const Operator* MachineOperatorBuilder::ProtectedLoad(LoadRepresentation rep) {
+#define LOAD(Type)                       \
+  if (rep == MachineType::Type()) {      \
+    return &cache_.kProtectedLoad##Type; \
+  }
+  MACHINE_TYPE_LIST(LOAD)
+#undef LOAD
+  UNREACHABLE();
+  return nullptr;
+}
+
const Operator* MachineOperatorBuilder::StackSlot(MachineRepresentation rep) {
#define STACKSLOT(Type) \
  if (rep == MachineType::Type().representation()) { \
...
@@ -41,6 +41,7 @@ class OptionalOperator final {
// A Load needs a MachineType.
typedef MachineType LoadRepresentation;
+typedef LoadRepresentation ProtectedLoadRepresentation;

LoadRepresentation LoadRepresentationOf(Operator const*);
@@ -601,6 +602,7 @@ class MachineOperatorBuilder final : public ZoneObject {
  // load [base + index]
  const Operator* Load(LoadRepresentation rep);
+ const Operator* ProtectedLoad(LoadRepresentation rep);

  // store [base + index], value
  const Operator* Store(StoreRepresentation rep);
...
@@ -185,6 +185,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  }
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  MipsOperandGenerator g(this);
...
@@ -199,6 +199,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  EmitLoad(this, node, opcode);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  Mips64OperandGenerator g(this);
...
@@ -486,6 +486,7 @@
  V(Word32PairShl) \
  V(Word32PairShr) \
  V(Word32PairSar) \
+ V(ProtectedLoad) \
  V(AtomicLoad) \
  V(AtomicStore) \
  V(UnsafePointerAdd)
...
@@ -229,6 +229,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  }
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  PPCOperandGenerator g(this);
...
@@ -327,6 +327,11 @@ void InstructionSelector::VisitLoad(Node* node) {
  Emit(code, 1, outputs, input_count, inputs);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  S390OperandGenerator g(this);
  Node* base = node->InputAt(0);
...
@@ -1099,6 +1099,7 @@ void Verifier::Visitor::Check(Node* node) {
    // Machine operators
    // -----------------------
    case IrOpcode::kLoad:
+   case IrOpcode::kProtectedLoad:
    case IrOpcode::kStore:
    case IrOpcode::kStackSlot:
    case IrOpcode::kWord32And:
...
@@ -2772,15 +2772,26 @@ Node* WasmGraphBuilder::LoadMem(wasm::LocalType type, MachineType memtype,
  Node* load;

  // WASM semantics throw on OOB. Introduce explicit bounds check.
- BoundsCheckMem(memtype, index, offset, position);
+ if (!FLAG_wasm_trap_handler) {
+   BoundsCheckMem(memtype, index, offset, position);
+ }
  bool aligned = static_cast<int>(alignment) >=
                 ElementSizeLog2Of(memtype.representation());

  if (aligned ||
      jsgraph()->machine()->UnalignedLoadSupported(memtype, alignment)) {
-   load = graph()->NewNode(jsgraph()->machine()->Load(memtype),
-                           MemBuffer(offset), index, *effect_, *control_);
+   if (FLAG_wasm_trap_handler) {
+     Node* context = HeapConstant(module_->instance->context);
+     Node* position_node = jsgraph()->Int32Constant(position);
+     load = graph()->NewNode(jsgraph()->machine()->ProtectedLoad(memtype),
+                             MemBuffer(offset), index, context, position_node,
+                             *effect_, *control_);
+   } else {
+     load = graph()->NewNode(jsgraph()->machine()->Load(memtype),
+                             MemBuffer(offset), index, *effect_, *control_);
+   }
  } else {
+   DCHECK(!FLAG_wasm_trap_handler);
    load = graph()->NewNode(jsgraph()->machine()->UnalignedLoad(memtype),
                            MemBuffer(offset), index, *effect_, *control_);
  }
...
@@ -9,6 +9,7 @@
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
+#include "src/wasm/wasm-module.h"
#include "src/x64/assembler-x64.h"
#include "src/x64/macro-assembler-x64.h"
@@ -260,6 +261,40 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
  RecordWriteMode const mode_;
};

+class WasmOutOfLineTrap final : public OutOfLineCode {
+ public:
+  WasmOutOfLineTrap(CodeGenerator* gen, Address pc, bool frame_elided,
+                    Register context, int32_t position)
+      : OutOfLineCode(gen),
+        pc_(pc),
+        frame_elided_(frame_elided),
+        context_(context),
+        position_(position) {}
+
+  void Generate() final {
+    // TODO(eholk): record pc_ and the current pc in a table so that
+    // the signal handler can find it.
+    USE(pc_);
+
+    if (frame_elided_) {
+      __ EnterFrame(StackFrame::WASM);
+    }
+
+    wasm::TrapReason trap_id = wasm::kTrapMemOutOfBounds;
+    int trap_reason = wasm::WasmOpcodes::TrapReasonToMessageId(trap_id);
+    __ Push(Smi::FromInt(trap_reason));
+    __ Push(Smi::FromInt(position_));
+    __ Move(rsi, context_);
+    __ CallRuntime(Runtime::kThrowWasmError);
+  }
+
+ private:
+  Address pc_;
+  bool frame_elided_;
+  Register context_;
+  int32_t position_;
+};

}  // namespace
@@ -1849,6 +1884,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
      break;
    }
    case kX64Movl:
+   case kX64TrapMovl:
      if (instr->HasOutput()) {
        if (instr->addressing_mode() == kMode_None) {
          if (instr->InputAt(0)->IsRegister()) {
@@ -1857,7 +1893,14 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
          __ movl(i.OutputRegister(), i.InputOperand(0));
        }
      } else {
+       Address pc = __ pc();
        __ movl(i.OutputRegister(), i.MemoryOperand());
+       if (arch_opcode == kX64TrapMovl) {
+         bool frame_elided = !frame_access_state()->has_frame();
+         new (zone()) WasmOutOfLineTrap(this, pc, frame_elided,
+                                        i.InputRegister(2), i.InputInt32(3));
+       }
      }
      __ AssertZeroExtended(i.OutputRegister());
    } else {
...
@@ -128,6 +128,7 @@ namespace compiler {
  V(X64Movzxwq) \
  V(X64Movw) \
  V(X64Movl) \
+ V(X64TrapMovl) \
  V(X64Movsxlq) \
  V(X64Movq) \
  V(X64Movsd) \
...
@@ -151,6 +151,7 @@ int InstructionScheduler::GetTargetInstructionFlags(
      return kHasSideEffect;

    case kX64Movl:
+   case kX64TrapMovl:
      if (instr->HasOutput()) {
        DCHECK(instr->InputCount() >= 1);
        return instr->InputAt(0)->IsRegister() ? kNoOpcodeFlags
...
@@ -155,11 +155,8 @@ class X64OperandGenerator final : public OperandGenerator {
  }
};

-void InstructionSelector::VisitLoad(Node* node) {
-  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
-  X64OperandGenerator g(this);
+namespace {
+
+ArchOpcode GetLoadOpcode(LoadRepresentation load_rep) {
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
@@ -187,9 +184,17 @@ void InstructionSelector::VisitLoad(Node* node) {
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
-     return;
+     break;
  }
+  return opcode;
+}
+
+}  // namespace
+
+void InstructionSelector::VisitLoad(Node* node) {
+  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
+  X64OperandGenerator g(this);
+  ArchOpcode opcode = GetLoadOpcode(load_rep);
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
@@ -200,6 +205,24 @@ void InstructionSelector::VisitLoad(Node* node) {
  Emit(code, 1, outputs, input_count, inputs);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
+  X64OperandGenerator g(this);
+  ArchOpcode opcode = GetLoadOpcode(load_rep);
+  InstructionOperand outputs[1];
+  outputs[0] = g.DefineAsRegister(node);
+  InstructionOperand inputs[4];
+  size_t input_count = 0;
+  AddressingMode mode =
+      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
+  // Add the context parameter as an input.
+  inputs[input_count++] = g.UseUniqueRegister(node->InputAt(2));
+  // Add the source position as an input
+  inputs[input_count++] = g.UseImmediate(node->InputAt(3));
+  InstructionCode code = opcode | AddressingModeField::encode(mode);
+  Emit(code, 1, outputs, input_count, inputs);
+}
+
void InstructionSelector::VisitStore(Node* node) {
  X64OperandGenerator g(this);
...
@@ -211,6 +211,10 @@ void InstructionSelector::VisitLoad(Node* node) {
  Emit(code, 1, outputs, input_count, inputs);
}

+void InstructionSelector::VisitProtectedLoad(Node* node) {
+  // TODO(eholk)
+  UNIMPLEMENTED();
+}

void InstructionSelector::VisitStore(Node* node) {
  X87OperandGenerator g(this);
...
@@ -531,6 +531,10 @@ DEFINE_BOOL(wasm_simd_prototype, false,
DEFINE_BOOL(wasm_eh_prototype, false,
            "enable prototype exception handling opcodes for wasm")

+DEFINE_BOOL(wasm_trap_handler, false,
+           "use signal handlers to catch out of bounds memory access in wasm"
+           " (currently Linux x86_64 only)")
+
// Profiler flags.
DEFINE_INT(frame_count, 1, "number of stack frames inspected by the profiler")
// 0x1800 fits in the immediate field of an ARM instruction.
...
@@ -1852,6 +1852,8 @@ class Assembler : public AssemblerBase {
  byte byte_at(int pos) { return buffer_[pos]; }
  void set_byte_at(int pos, byte value) { buffer_[pos] = value; }

+ Address pc() const { return pc_; }
+
 protected:
  // Call near indirect
  void call(const Operand& operand);
...