Commit 05918654 authored by Milad Farazmand, committed by Commit Bot

PPC/s390: Reland "[compiler] Optionally apply an offset to stack checks"

Port b875f466

Original Commit Message:

    This is a reland of 4a16305b

    The original CL adjusted only one part of the stack check, namely the
    comparison of the stack pointer against the stack limit in generated code.
    There is a second part: Runtime::kStackGuard repeats this check to
    distinguish between a stack overflow and an interrupt request.

    This second, runtime part must apply the offset just as the generated
    code does. This reland implements it through the new StackCheckOffset
    operator and a new StackGuardWithGap runtime function.
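
    For reference, the runtime half of the fix has roughly this shape; the
    sketch below assumes the usual v8 runtime-function plumbing and is not
    quoted from the CL itself:

        // Like Runtime::kStackGuard, but widens the limit comparison by the
        // frame-size gap, so runtime and generated code agree on whether a
        // check failure means stack overflow or an interrupt request.
        RUNTIME_FUNCTION(Runtime_StackGuardWithGap) {
          SealHandleScope shs(isolate);
          DCHECK_EQ(1, args.length());
          CONVERT_UINT32_ARG_CHECKED(gap, 0);
          StackLimitCheck check(isolate);
          if (check.JsHasOverflowed(gap)) return isolate->StackOverflow();
          return isolate->stack_guard()->HandleInterrupts();
        }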

    Original change's description:
    > [compiler] Optionally apply an offset to stack checks
    >
    > The motivation behind this change is that the frame size of an optimized
    > function and its unoptimized version may differ, and deoptimization
    > may thus trigger a stack overflow. The solution implemented in this CL
    > is to optionally apply an offset to the stack check s.t. the check
    > becomes 'sp - offset > limit'. The offset is applied to stack checks at
    > function-entry, and is set to the difference between the optimized and
    > unoptimized frame size.
    >
    > A caveat: OSR may not be fully handled by this fix since we've already
    > passed the function-entry stack check. A possible solution would be to
    > *not* skip creation of function-entry stack checks for inlinees.
    >
    > This CL: 1. annotates stack check nodes with the stack check kind, where
    > kind is one of {function-entry,iteration-body,unknown}. 2. potentially
    > allocates a temporary register to store the result of the 'sp - offset'
    > in instruction selection (and switches input registers to 'unique'
    > mode). 3. Applies the offset in code generation.
    >
    > Drive-by: Add src/compiler/globals.h for compiler-specific globals.
    >
    > Bug: v8:9534,chromium:1000887
    > Change-Id: I257191c4a4978ccb60cfa5805ef421f30f0e9826
    > Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1762521
    > Commit-Queue: Jakob Gruber <jgruber@chromium.org>
    > Reviewed-by: Georg Neis <neis@chromium.org>
    > Cr-Commit-Position: refs/heads/master@{#63701}
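
To make the quoted description concrete: the function-entry check changes
from 'sp > limit' to 'sp - offset > limit'. A minimal standalone model of
the two predicates (illustrative C++ only; the names are hypothetical, not
v8 code):

    #include <cstdint>

    // Plain check: fine as long as the stack pointer is above the limit.
    bool PlainCheckOk(uintptr_t sp, uintptr_t limit) { return sp > limit; }

    // Offset check: additionally reserves 'offset' bytes of headroom so a
    // deopt into larger unoptimized frames cannot cross the real limit.
    bool OffsetCheckOk(uintptr_t sp, uintptr_t limit, uint32_t offset) {
      return sp - offset > limit;
    }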

R=jgruber@chromium.org, joransiu@ca.ibm.com, jyan@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: I37bf1d9157a96a5d3538108703f2d7469a11bffe
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1890535
Reviewed-by: Junliang Yan <jyan@ca.ibm.com>
Commit-Queue: Milad Farazmand <miladfar@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#64652}
parent d88988b1
--- a/src/compiler/backend/ppc/code-generator-ppc.cc
+++ b/src/compiler/backend/ppc/code-generator-ppc.cc
@@ -1140,11 +1140,32 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }
       break;
     case kArchStackPointerGreaterThan: {
+      // Potentially apply an offset to the current stack pointer before the
+      // comparison to consider the size difference of an optimized frame versus
+      // the contained unoptimized frames.
+      Register lhs_register = sp;
+      uint32_t offset;
+      if (ShouldApplyOffsetToStackCheck(instr, &offset)) {
+        lhs_register = i.TempRegister(0);
+        if (is_int16(offset)) {
+          __ subi(lhs_register, sp, Operand(offset));
+        } else {
+          __ mov(kScratchReg, Operand(offset));
+          __ sub(lhs_register, sp, kScratchReg);
+        }
+      }
+
       constexpr size_t kValueIndex = 0;
       DCHECK(instr->InputAt(kValueIndex)->IsRegister());
-      __ cmpl(sp, i.InputRegister(kValueIndex), cr0);
+      __ cmpl(lhs_register, i.InputRegister(kValueIndex), cr0);
       break;
     }
+    case kArchStackCheckOffset:
+      __ LoadSmiLiteral(i.OutputRegister(),
+                        Smi::FromInt(GetStackCheckOffset()));
+      break;
     case kArchTruncateDoubleToI:
       __ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
                            i.InputDoubleRegister(0), DetermineStubCallMode());
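
The ShouldApplyOffsetToStackCheck helper used in this hunk comes from the
shared code generator, added by the original CL. As this port relies on it,
its behavior is roughly the following sketch; the exact early-out threshold
is an assumption:

    // Returns true if this stack check should subtract an offset from sp,
    // storing that offset through *offset. Only function-entry checks
    // (the kind is decoded from the opcode's MiscField) apply an offset.
    bool CodeGenerator::ShouldApplyOffsetToStackCheck(Instruction* instr,
                                                      uint32_t* offset) {
      DCHECK_EQ(kArchStackPointerGreaterThan,
                ArchOpcodeField::decode(instr->opcode()));
      StackCheckKind kind =
          static_cast<StackCheckKind>(MiscField::decode(instr->opcode()));
      if (kind != StackCheckKind::kJSFunctionEntry) return false;
      *offset = GetStackCheckOffset();
      return *offset > 0;  // assumed threshold; the real code may allow slack
    }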
--- a/src/compiler/backend/ppc/instruction-selector-ppc.cc
+++ b/src/compiler/backend/ppc/instruction-selector-ppc.cc
@@ -550,11 +550,31 @@ void InstructionSelector::VisitWord32Xor(Node* node) {

 void InstructionSelector::VisitStackPointerGreaterThan(
     Node* node, FlagsContinuation* cont) {
-  Node* const value = node->InputAt(0);
-  InstructionCode opcode = kArchStackPointerGreaterThan;
+  StackCheckKind kind = StackCheckKindOf(node->op());
+  InstructionCode opcode =
+      kArchStackPointerGreaterThan | MiscField::encode(static_cast<int>(kind));

   PPCOperandGenerator g(this);
-  EmitWithContinuation(opcode, g.UseRegister(value), cont);
+
+  // No outputs.
+  InstructionOperand* const outputs = nullptr;
+  const int output_count = 0;
+
+  // Applying an offset to this stack check requires a temp register. Offsets
+  // are only applied to the first stack check. If applying an offset, we must
+  // ensure the input and temp registers do not alias, thus kUniqueRegister.
+  InstructionOperand temps[] = {g.TempRegister()};
+  const int temp_count = (kind == StackCheckKind::kJSFunctionEntry) ? 1 : 0;
+  const auto register_mode = (kind == StackCheckKind::kJSFunctionEntry)
+                                 ? OperandGenerator::kUniqueRegister
+                                 : OperandGenerator::kRegister;
+
+  Node* const value = node->InputAt(0);
+  InstructionOperand inputs[] = {g.UseRegisterWithMode(value, register_mode)};
+  static constexpr int input_count = arraysize(inputs);
+
+  EmitWithContinuation(opcode, output_count, outputs, input_count, inputs,
+                       temp_count, temps, cont);
 }

 #if V8_TARGET_ARCH_PPC64
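
StackCheckKindOf(node->op()) reads the kind the middle end attached when it
created the node; encoding it into MiscField here is what lets the code
generator decode it again and decide whether to apply the offset. The kinds
live in the new src/compiler/globals.h; beyond kJSFunctionEntry, which this
diff uses, the enumerator list below is an assumption:

    enum class StackCheckKind {
      kJSFunctionEntry = 0,  // the only kind that applies an offset
      kJSIterationBody,
      kCodeStubAssembler,
      kWasm,
    };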
--- a/src/compiler/backend/s390/code-generator-s390.cc
+++ b/src/compiler/backend/s390/code-generator-s390.cc
@@ -1597,11 +1597,27 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
       }
       break;
     case kArchStackPointerGreaterThan: {
+      // Potentially apply an offset to the current stack pointer before the
+      // comparison to consider the size difference of an optimized frame versus
+      // the contained unoptimized frames.
+      Register lhs_register = sp;
+      uint32_t offset;
+      if (ShouldApplyOffsetToStackCheck(instr, &offset)) {
+        lhs_register = i.TempRegister(0);
+        __ SubP(lhs_register, sp, Operand(offset));
+      }
+
       constexpr size_t kValueIndex = 0;
       DCHECK(instr->InputAt(kValueIndex)->IsRegister());
-      __ CmpLogicalP(sp, i.InputRegister(kValueIndex));
+      __ CmpLogicalP(lhs_register, i.InputRegister(kValueIndex));
       break;
     }
+    case kArchStackCheckOffset:
+      __ LoadSmiLiteral(i.OutputRegister(),
+                        Smi::FromInt(GetStackCheckOffset()));
+      break;
     case kArchTruncateDoubleToI:
       __ TruncateDoubleToI(isolate(), zone(), i.OutputRegister(),
                            i.InputDoubleRegister(0), DetermineStubCallMode());
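
Both kArchStackCheckOffset cases funnel through GetStackCheckOffset(), which
per the CL description returns the difference between the maximum unoptimized
frame height and the optimized frame height, clamped at zero. A sketch under
those stated semantics (the member and accessor names are assumptions):

    uint32_t CodeGenerator::GetStackCheckOffset() {
      // Without a frame there is nothing to deoptimize into, so no headroom
      // is needed.
      if (!frame_access_state()->has_frame()) return 0;
      int32_t optimized_height =
          frame()->GetTotalFrameSlotCount() * kSystemPointerSize;
      int32_t diff = static_cast<int32_t>(max_unoptimized_frame_height_) -
                     optimized_height;
      return diff > 0 ? static_cast<uint32_t>(diff) : 0;
    }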
--- a/src/compiler/backend/s390/instruction-selector-s390.cc
+++ b/src/compiler/backend/s390/instruction-selector-s390.cc
@@ -824,11 +824,31 @@ void InstructionSelector::VisitUnalignedStore(Node* node) { UNREACHABLE(); }

 void InstructionSelector::VisitStackPointerGreaterThan(
     Node* node, FlagsContinuation* cont) {
-  Node* const value = node->InputAt(0);
-  InstructionCode opcode = kArchStackPointerGreaterThan;
+  StackCheckKind kind = StackCheckKindOf(node->op());
+  InstructionCode opcode =
+      kArchStackPointerGreaterThan | MiscField::encode(static_cast<int>(kind));

   S390OperandGenerator g(this);
-  EmitWithContinuation(opcode, g.UseRegister(value), cont);
+
+  // No outputs.
+  InstructionOperand* const outputs = nullptr;
+  const int output_count = 0;
+
+  // Applying an offset to this stack check requires a temp register. Offsets
+  // are only applied to the first stack check. If applying an offset, we must
+  // ensure the input and temp registers do not alias, thus kUniqueRegister.
+  InstructionOperand temps[] = {g.TempRegister()};
+  const int temp_count = (kind == StackCheckKind::kJSFunctionEntry) ? 1 : 0;
+  const auto register_mode = (kind == StackCheckKind::kJSFunctionEntry)
+                                 ? OperandGenerator::kUniqueRegister
+                                 : OperandGenerator::kRegister;
+
+  Node* const value = node->InputAt(0);
+  InstructionOperand inputs[] = {g.UseRegisterWithMode(value, register_mode)};
+  static constexpr int input_count = arraysize(inputs);
+
+  EmitWithContinuation(opcode, output_count, outputs, input_count, inputs,
+                       temp_count, temps, cont);
 }

 #if 0