Commit ab7bd9f4 authored by Andreas Haas, committed by Commit Bot

[wasm] Reduce size of the trap handler ool code

With this CL we use the same optimizations for the trap handler ool code
which we already use for trap-if.

* Call a builtin instead of calling the runtime directly.
* Use one call per ool code instead of a source position parameter.
* Do not pass the trap reason as parameter.

R=titzer@chromium.org, eholk@chromium.org

Change-Id: Ieef6da96f340269c3e91efd21ac24e61a42193f4
Reviewed-on: https://chromium-review.googlesource.com/684436
Reviewed-by: Ben Titzer <titzer@chromium.org>
Commit-Queue: Andreas Haas <ahaas@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48168}
parent e6d84f4e
...@@ -862,7 +862,9 @@ bool InstructionSelector::IsSourcePositionUsed(Node* node) { ...@@ -862,7 +862,9 @@ bool InstructionSelector::IsSourcePositionUsed(Node* node) {
node->opcode() == IrOpcode::kCall || node->opcode() == IrOpcode::kCall ||
node->opcode() == IrOpcode::kCallWithCallerSavedRegisters || node->opcode() == IrOpcode::kCallWithCallerSavedRegisters ||
node->opcode() == IrOpcode::kTrapIf || node->opcode() == IrOpcode::kTrapIf ||
node->opcode() == IrOpcode::kTrapUnless); node->opcode() == IrOpcode::kTrapUnless ||
node->opcode() == IrOpcode::kProtectedLoad ||
node->opcode() == IrOpcode::kProtectedStore);
} }
void InstructionSelector::VisitBlock(BasicBlock* block) { void InstructionSelector::VisitBlock(BasicBlock* block) {
......
...@@ -487,7 +487,7 @@ struct MachineOperatorGlobalCache { ...@@ -487,7 +487,7 @@ struct MachineOperatorGlobalCache {
ProtectedLoad##Type##Operator() \ ProtectedLoad##Type##Operator() \
: Operator1<LoadRepresentation>( \ : Operator1<LoadRepresentation>( \
IrOpcode::kProtectedLoad, \ IrOpcode::kProtectedLoad, \
Operator::kNoDeopt | Operator::kNoThrow, "ProtectedLoad", 3, 1, \ Operator::kNoDeopt | Operator::kNoThrow, "ProtectedLoad", 2, 1, \
1, 1, 1, 0, MachineType::Type()) {} \ 1, 1, 1, 0, MachineType::Type()) {} \
}; \ }; \
Load##Type##Operator kLoad##Type; \ Load##Type##Operator kLoad##Type; \
...@@ -562,7 +562,7 @@ struct MachineOperatorGlobalCache { ...@@ -562,7 +562,7 @@ struct MachineOperatorGlobalCache {
: Operator1<StoreRepresentation>( \ : Operator1<StoreRepresentation>( \
IrOpcode::kProtectedStore, \ IrOpcode::kProtectedStore, \
Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \ Operator::kNoDeopt | Operator::kNoRead | Operator::kNoThrow, \
"Store", 4, 1, 1, 0, 1, 0, \ "Store", 3, 1, 1, 0, 1, 0, \
StoreRepresentation(MachineRepresentation::Type, \ StoreRepresentation(MachineRepresentation::Type, \
kNoWriteBarrier)) {} \ kNoWriteBarrier)) {} \
}; \ }; \
......
...@@ -3401,10 +3401,9 @@ Node* WasmGraphBuilder::LoadMem(wasm::ValueType type, MachineType memtype, ...@@ -3401,10 +3401,9 @@ Node* WasmGraphBuilder::LoadMem(wasm::ValueType type, MachineType memtype,
jsgraph()->machine()->UnalignedLoadSupported(memtype.representation())) { jsgraph()->machine()->UnalignedLoadSupported(memtype.representation())) {
if (FLAG_wasm_trap_handler && V8_TRAP_HANDLER_SUPPORTED) { if (FLAG_wasm_trap_handler && V8_TRAP_HANDLER_SUPPORTED) {
DCHECK(FLAG_wasm_guard_pages); DCHECK(FLAG_wasm_guard_pages);
Node* position_node = jsgraph()->Int32Constant(position);
load = graph()->NewNode(jsgraph()->machine()->ProtectedLoad(memtype), load = graph()->NewNode(jsgraph()->machine()->ProtectedLoad(memtype),
MemBuffer(offset), index, position_node, *effect_, MemBuffer(offset), index, *effect_, *control_);
*control_); SetSourcePosition(load, position);
} else { } else {
load = graph()->NewNode(jsgraph()->machine()->Load(memtype), load = graph()->NewNode(jsgraph()->machine()->Load(memtype),
MemBuffer(offset), index, *effect_, *control_); MemBuffer(offset), index, *effect_, *control_);
...@@ -3456,10 +3455,10 @@ Node* WasmGraphBuilder::StoreMem(MachineType memtype, Node* index, ...@@ -3456,10 +3455,10 @@ Node* WasmGraphBuilder::StoreMem(MachineType memtype, Node* index,
if (memtype.representation() == MachineRepresentation::kWord8 || if (memtype.representation() == MachineRepresentation::kWord8 ||
jsgraph()->machine()->UnalignedStoreSupported(memtype.representation())) { jsgraph()->machine()->UnalignedStoreSupported(memtype.representation())) {
if (FLAG_wasm_trap_handler && V8_TRAP_HANDLER_SUPPORTED) { if (FLAG_wasm_trap_handler && V8_TRAP_HANDLER_SUPPORTED) {
Node* position_node = jsgraph()->Int32Constant(position);
store = graph()->NewNode( store = graph()->NewNode(
jsgraph()->machine()->ProtectedStore(memtype.representation()), jsgraph()->machine()->ProtectedStore(memtype.representation()),
MemBuffer(offset), index, val, position_node, *effect_, *control_); MemBuffer(offset), index, val, *effect_, *control_);
SetSourcePosition(store, position);
} else { } else {
StoreRepresentation rep(memtype.representation(), kNoWriteBarrier); StoreRepresentation rep(memtype.representation(), kNoWriteBarrier);
store = store =
......
...@@ -279,12 +279,12 @@ class OutOfLineRecordWrite final : public OutOfLineCode { ...@@ -279,12 +279,12 @@ class OutOfLineRecordWrite final : public OutOfLineCode {
class WasmOutOfLineTrap final : public OutOfLineCode { class WasmOutOfLineTrap final : public OutOfLineCode {
public: public:
WasmOutOfLineTrap(CodeGenerator* gen, int pc, bool frame_elided, WasmOutOfLineTrap(CodeGenerator* gen, int pc, bool frame_elided,
int32_t position) Instruction* instr)
: OutOfLineCode(gen), : OutOfLineCode(gen),
gen_(gen), gen_(gen),
pc_(pc), pc_(pc),
frame_elided_(frame_elided), frame_elided_(frame_elided),
position_(position) {} instr_(instr) {}
// TODO(eholk): Refactor this method to take the code generator as a // TODO(eholk): Refactor this method to take the code generator as a
// parameter. // parameter.
...@@ -295,38 +295,31 @@ class WasmOutOfLineTrap final : public OutOfLineCode { ...@@ -295,38 +295,31 @@ class WasmOutOfLineTrap final : public OutOfLineCode {
__ EnterFrame(StackFrame::WASM_COMPILED); __ EnterFrame(StackFrame::WASM_COMPILED);
} }
wasm::TrapReason trap_id = wasm::kTrapMemOutOfBounds; gen_->AssembleSourcePosition(instr_);
int trap_reason = wasm::WasmOpcodes::TrapReasonToMessageId(trap_id); __ Call(__ isolate()->builtins()->builtin_handle(
__ Push(Smi::FromInt(trap_reason)); Builtins::kThrowWasmTrapMemOutOfBounds),
// TODO(eholk): use AssembleSourcePosition instead of passing in position_ RelocInfo::CODE_TARGET);
// as a parameter. See AssembleArchTrap as an example. Consider sharing code ReferenceMap* reference_map = new (gen_->zone()) ReferenceMap(gen_->zone());
// with AssembleArchTrap.
__ Push(Smi::FromInt(position_));
__ Move(rsi, Smi::kZero);
__ CallRuntimeDelayed(gen_->zone(), Runtime::kThrowWasmError);
ReferenceMap* reference_map =
new (gen_->code()->zone()) ReferenceMap(gen_->code()->zone());
gen_->RecordSafepoint(reference_map, Safepoint::kSimple, 0, gen_->RecordSafepoint(reference_map, Safepoint::kSimple, 0,
Safepoint::kNoLazyDeopt); Safepoint::kNoLazyDeopt);
__ AssertUnreachable(kUnexpectedReturnFromWasmTrap);
} }
private: private:
CodeGenerator* gen_; CodeGenerator* gen_;
int pc_; int pc_;
bool frame_elided_; bool frame_elided_;
int32_t position_; Instruction* instr_;
}; };
void EmitOOLTrapIfNeeded(Zone* zone, CodeGenerator* codegen, void EmitOOLTrapIfNeeded(Zone* zone, CodeGenerator* codegen,
InstructionCode opcode, size_t input_count, InstructionCode opcode, Instruction* instr,
X64OperandConverter& i, int pc) { X64OperandConverter& i, int pc) {
const X64MemoryProtection protection = const X64MemoryProtection protection =
static_cast<X64MemoryProtection>(MiscField::decode(opcode)); static_cast<X64MemoryProtection>(MiscField::decode(opcode));
if (protection == X64MemoryProtection::kProtected) { if (protection == X64MemoryProtection::kProtected) {
const bool frame_elided = !codegen->frame_access_state()->has_frame(); const bool frame_elided = !codegen->frame_access_state()->has_frame();
const int32_t position = i.InputInt32(input_count - 1); new (zone) WasmOutOfLineTrap(codegen, pc, frame_elided, instr);
new (zone) WasmOutOfLineTrap(codegen, pc, frame_elided, position);
} }
} }
} // namespace } // namespace
...@@ -1936,31 +1929,26 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -1936,31 +1929,26 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
__ Subsd(i.InputDoubleRegister(0), kScratchDoubleReg); __ Subsd(i.InputDoubleRegister(0), kScratchDoubleReg);
break; break;
case kX64Movsxbl: case kX64Movsxbl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movsxbl); ASSEMBLE_MOVX(movsxbl);
__ AssertZeroExtended(i.OutputRegister()); __ AssertZeroExtended(i.OutputRegister());
break; break;
case kX64Movzxbl: case kX64Movzxbl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movzxbl); ASSEMBLE_MOVX(movzxbl);
__ AssertZeroExtended(i.OutputRegister()); __ AssertZeroExtended(i.OutputRegister());
break; break;
case kX64Movsxbq: case kX64Movsxbq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movsxbq); ASSEMBLE_MOVX(movsxbq);
break; break;
case kX64Movzxbq: case kX64Movzxbq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movzxbq); ASSEMBLE_MOVX(movzxbq);
__ AssertZeroExtended(i.OutputRegister()); __ AssertZeroExtended(i.OutputRegister());
break; break;
case kX64Movb: { case kX64Movb: {
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
size_t index = 0; size_t index = 0;
Operand operand = i.MemoryOperand(&index); Operand operand = i.MemoryOperand(&index);
if (HasImmediateInput(instr, index)) { if (HasImmediateInput(instr, index)) {
...@@ -1971,31 +1959,26 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -1971,31 +1959,26 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break; break;
} }
case kX64Movsxwl: case kX64Movsxwl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movsxwl); ASSEMBLE_MOVX(movsxwl);
__ AssertZeroExtended(i.OutputRegister()); __ AssertZeroExtended(i.OutputRegister());
break; break;
case kX64Movzxwl: case kX64Movzxwl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movzxwl); ASSEMBLE_MOVX(movzxwl);
__ AssertZeroExtended(i.OutputRegister()); __ AssertZeroExtended(i.OutputRegister());
break; break;
case kX64Movsxwq: case kX64Movsxwq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movsxwq); ASSEMBLE_MOVX(movsxwq);
break; break;
case kX64Movzxwq: case kX64Movzxwq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movzxwq); ASSEMBLE_MOVX(movzxwq);
__ AssertZeroExtended(i.OutputRegister()); __ AssertZeroExtended(i.OutputRegister());
break; break;
case kX64Movw: { case kX64Movw: {
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
size_t index = 0; size_t index = 0;
Operand operand = i.MemoryOperand(&index); Operand operand = i.MemoryOperand(&index);
if (HasImmediateInput(instr, index)) { if (HasImmediateInput(instr, index)) {
...@@ -2006,8 +1989,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -2006,8 +1989,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break; break;
} }
case kX64Movl: case kX64Movl:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
if (instr->HasOutput()) { if (instr->HasOutput()) {
if (instr->addressing_mode() == kMode_None) { if (instr->addressing_mode() == kMode_None) {
if (instr->InputAt(0)->IsRegister()) { if (instr->InputAt(0)->IsRegister()) {
...@@ -2030,13 +2012,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -2030,13 +2012,11 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
} }
break; break;
case kX64Movsxlq: case kX64Movsxlq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
ASSEMBLE_MOVX(movsxlq); ASSEMBLE_MOVX(movsxlq);
break; break;
case kX64Movq: case kX64Movq:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
if (instr->HasOutput()) { if (instr->HasOutput()) {
__ movq(i.OutputRegister(), i.MemoryOperand()); __ movq(i.OutputRegister(), i.MemoryOperand());
} else { } else {
...@@ -2050,8 +2030,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -2050,8 +2030,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
} }
break; break;
case kX64Movss: case kX64Movss:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
if (instr->HasOutput()) { if (instr->HasOutput()) {
__ movss(i.OutputDoubleRegister(), i.MemoryOperand()); __ movss(i.OutputDoubleRegister(), i.MemoryOperand());
} else { } else {
...@@ -2061,8 +2040,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -2061,8 +2040,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
} }
break; break;
case kX64Movsd: case kX64Movsd:
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
if (instr->HasOutput()) { if (instr->HasOutput()) {
__ Movsd(i.OutputDoubleRegister(), i.MemoryOperand()); __ Movsd(i.OutputDoubleRegister(), i.MemoryOperand());
} else { } else {
...@@ -2073,8 +2051,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction( ...@@ -2073,8 +2051,7 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
break; break;
case kX64Movdqu: { case kX64Movdqu: {
CpuFeatureScope sse_scope(tasm(), SSSE3); CpuFeatureScope sse_scope(tasm(), SSSE3);
EmitOOLTrapIfNeeded(zone(), this, opcode, instr->InputCount(), i, EmitOOLTrapIfNeeded(zone(), this, opcode, instr, i, __ pc_offset());
__ pc_offset());
if (instr->HasOutput()) { if (instr->HasOutput()) {
__ movdqu(i.OutputSimd128Register(), i.MemoryOperand()); __ movdqu(i.OutputSimd128Register(), i.MemoryOperand());
} else { } else {
......
...@@ -296,15 +296,13 @@ void InstructionSelector::VisitLoad(Node* node) { ...@@ -296,15 +296,13 @@ void InstructionSelector::VisitLoad(Node* node) {
ArchOpcode opcode = GetLoadOpcode(load_rep); ArchOpcode opcode = GetLoadOpcode(load_rep);
InstructionOperand outputs[1]; InstructionOperand outputs[1];
outputs[0] = g.DefineAsRegister(node); outputs[0] = g.DefineAsRegister(node);
InstructionOperand inputs[4]; InstructionOperand inputs[3];
size_t input_count = 0; size_t input_count = 0;
AddressingMode mode = AddressingMode mode =
g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
InstructionCode code = opcode | AddressingModeField::encode(mode); InstructionCode code = opcode | AddressingModeField::encode(mode);
if (node->opcode() == IrOpcode::kProtectedLoad) { if (node->opcode() == IrOpcode::kProtectedLoad) {
code |= MiscField::encode(X64MemoryProtection::kProtected); code |= MiscField::encode(X64MemoryProtection::kProtected);
// Add the source position as an input
inputs[input_count++] = g.UseImmediate(node->InputAt(2));
} }
Emit(code, 1, outputs, input_count, inputs); Emit(code, 1, outputs, input_count, inputs);
} }
...@@ -379,12 +377,11 @@ void InstructionSelector::VisitStore(Node* node) { ...@@ -379,12 +377,11 @@ void InstructionSelector::VisitStore(Node* node) {
void InstructionSelector::VisitProtectedStore(Node* node) { void InstructionSelector::VisitProtectedStore(Node* node) {
X64OperandGenerator g(this); X64OperandGenerator g(this);
Node* value = node->InputAt(2); Node* value = node->InputAt(2);
Node* position = node->InputAt(3);
StoreRepresentation store_rep = StoreRepresentationOf(node->op()); StoreRepresentation store_rep = StoreRepresentationOf(node->op());
ArchOpcode opcode = GetStoreOpcode(store_rep); ArchOpcode opcode = GetStoreOpcode(store_rep);
InstructionOperand inputs[5]; InstructionOperand inputs[4];
size_t input_count = 0; size_t input_count = 0;
AddressingMode addressing_mode = AddressingMode addressing_mode =
g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count); g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
...@@ -393,7 +390,6 @@ void InstructionSelector::VisitProtectedStore(Node* node) { ...@@ -393,7 +390,6 @@ void InstructionSelector::VisitProtectedStore(Node* node) {
InstructionOperand value_operand = InstructionOperand value_operand =
g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value); g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
inputs[input_count++] = value_operand; inputs[input_count++] = value_operand;
inputs[input_count++] = g.UseImmediate(position);
Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs); Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count, inputs);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment