Commit 0700bc1b authored by ishell, committed by Commit bot

[stubs] Ensure CSA_ASSERT and CSA_SLOW_ASSERT do not produce unused instructions in release mode.

... and make them applicable outside of CSA.

Nice bonus is that the assert condition instructions will now appear inside [Assert / ]Assert brackets.

BUG=

Review-Url: https://codereview.chromium.org/2489743002
Cr-Commit-Position: refs/heads/master@{#40869}
parent abad9b2f
......@@ -2179,14 +2179,14 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
assembler->Bind(&if_isfastarray);
{
assembler->Assert(
assembler->Word32Equal(assembler->LoadMapInstanceType(array_map),
assembler->Int32Constant(JS_ARRAY_TYPE)));
CSA_ASSERT(assembler,
assembler->Word32Equal(assembler->LoadMapInstanceType(array_map),
assembler->Int32Constant(JS_ARRAY_TYPE)));
Node* length = assembler->LoadObjectField(array, JSArray::kLengthOffset);
assembler->Assert(assembler->TaggedIsSmi(length));
assembler->Assert(assembler->TaggedIsSmi(index));
CSA_ASSERT(assembler, assembler->TaggedIsSmi(length));
CSA_ASSERT(assembler, assembler->TaggedIsSmi(index));
assembler->GotoUnless(assembler->SmiBelow(index, length), &set_done);
......@@ -2384,8 +2384,8 @@ void Builtins::Generate_ArrayIteratorPrototypeNext(
assembler->Bind(&done);
length = var_length.value();
}
assembler->Assert(assembler->TaggedIsSmi(length));
assembler->Assert(assembler->TaggedIsSmi(index));
CSA_ASSERT(assembler, assembler->TaggedIsSmi(length));
CSA_ASSERT(assembler, assembler->TaggedIsSmi(index));
assembler->GotoUnless(assembler->SmiBelow(index, length), &set_done);
......
......@@ -1602,7 +1602,7 @@ compiler::Node* ReplaceGlobalCallableFastPath(
Node* const res_length = a->LoadJSArrayLength(res);
Node* const res_elems = a->LoadElements(res);
a->CSA_ASSERT(a->HasInstanceType(res_elems, FIXED_ARRAY_TYPE));
CSA_ASSERT(a, a->HasInstanceType(res_elems, FIXED_ARRAY_TYPE));
CodeStubAssembler::ParameterMode mode = CodeStubAssembler::INTPTR_PARAMETERS;
Node* const num_capture_registers = a->LoadFixedArrayElement(
......@@ -1677,7 +1677,7 @@ compiler::Node* ReplaceGlobalCallableFastPath(
a->Bind(&if_isstring);
{
a->Assert(a->IsStringInstanceType(a->LoadInstanceType(elem)));
CSA_ASSERT(a, a->IsStringInstanceType(a->LoadInstanceType(elem)));
Callable call_callable = CodeFactory::Call(isolate);
Node* const replacement_obj =
......@@ -1725,7 +1725,7 @@ compiler::Node* ReplaceGlobalCallableFastPath(
// elem must be an Array.
// Use the apply argument as backing for global RegExp properties.
a->CSA_ASSERT(a->HasInstanceType(elem, JS_ARRAY_TYPE));
CSA_ASSERT(a, a->HasInstanceType(elem, JS_ARRAY_TYPE));
// TODO(jgruber): Remove indirection through Call->ReflectApply.
Callable call_callable = CodeFactory::Call(isolate);
......
......@@ -1001,8 +1001,8 @@ void Builtins::Generate_StringPrototypeSubstr(CodeStubAssembler* a) {
// two cases according to the spec: if it is negative, "" is returned; if
// it is positive, then length is set to {string_length} - {start}.
a->Assert(a->WordEqual(a->LoadMap(var_length.value()),
a->HeapNumberMapConstant()));
CSA_ASSERT(a, a->WordEqual(a->LoadMap(var_length.value()),
a->HeapNumberMapConstant()));
Label if_isnegative(a), if_ispositive(a);
Node* const float_zero = a->Float64Constant(0.);
......@@ -1071,7 +1071,8 @@ compiler::Node* ToSmiBetweenZeroAnd(CodeStubAssembler* a,
a->Bind(&if_isnotsmi);
{
// {value} is a heap number - in this case, it is definitely out of bounds.
a->Assert(a->WordEqual(a->LoadMap(value_int), a->HeapNumberMapConstant()));
CSA_ASSERT(a,
a->WordEqual(a->LoadMap(value_int), a->HeapNumberMapConstant()));
Node* const float_zero = a->Float64Constant(0.);
Node* const smi_zero = a->SmiConstant(Smi::kZero);
......@@ -1301,17 +1302,16 @@ compiler::Node* LoadSurrogatePairInternal(CodeStubAssembler* assembler,
{
Node* lead = var_result.value();
Node* trail = var_trail.value();
#ifdef ENABLE_SLOW_DCHECKS
// Check that this path is only taken if a surrogate pair is found
assembler->Assert(assembler->Uint32GreaterThanOrEqual(
lead, assembler->Int32Constant(0xD800)));
assembler->Assert(
assembler->Uint32LessThan(lead, assembler->Int32Constant(0xDC00)));
assembler->Assert(assembler->Uint32GreaterThanOrEqual(
trail, assembler->Int32Constant(0xDC00)));
assembler->Assert(
assembler->Uint32LessThan(trail, assembler->Int32Constant(0xE000)));
#endif
CSA_SLOW_ASSERT(assembler, assembler->Uint32GreaterThanOrEqual(
lead, assembler->Int32Constant(0xD800)));
CSA_SLOW_ASSERT(assembler, assembler->Uint32LessThan(
lead, assembler->Int32Constant(0xDC00)));
CSA_SLOW_ASSERT(assembler, assembler->Uint32GreaterThanOrEqual(
trail, assembler->Int32Constant(0xDC00)));
CSA_SLOW_ASSERT(assembler, assembler->Uint32LessThan(
trail, assembler->Int32Constant(0xE000)));
switch (encoding) {
case UnicodeEncoding::UTF16:
......
This diff is collapsed.
......@@ -161,7 +161,8 @@ class V8_EXPORT_PRIVATE CodeStubAssembler : public compiler::CodeAssembler {
compiler::Node* offset);
compiler::Node* IsRegularHeapObjectSize(compiler::Node* size);
void Assert(compiler::Node* condition, const char* string = nullptr,
typedef std::function<compiler::Node*()> ConditionBody;
void Assert(ConditionBody condition_body, const char* string = nullptr,
const char* file = nullptr, int line = 0);
// Check a value for smi-ness
......@@ -1263,14 +1264,20 @@ class CodeStubArguments {
compiler::Node* fp_;
};
#define CSA_ASSERT(x) Assert((x), #x, __FILE__, __LINE__)
#ifdef DEBUG
#define CSA_ASSERT(csa, x) \
(csa)->Assert([&] { return (x); }, #x, __FILE__, __LINE__)
#else
#define CSA_ASSERT(csa, x) ((void)0)
#endif
#ifdef ENABLE_SLOW_DCHECKS
#define CSA_SLOW_ASSERT(x) \
if (FLAG_enable_slow_asserts) { \
Assert((x), #x, __FILE__, __LINE__); \
#define CSA_SLOW_ASSERT(csa, x) \
if (FLAG_enable_slow_asserts) { \
(csa)->Assert([&] { return (x); }, #x, __FILE__, __LINE__); \
}
#else
#define CSA_SLOW_ASSERT(x)
#define CSA_SLOW_ASSERT(csa, x) ((void)0)
#endif
DEFINE_OPERATORS_FOR_FLAGS(CodeStubAssembler::AllocationFlags);
......
......@@ -1754,9 +1754,10 @@ compiler::Node* IncStub::Generate(CodeStubAssembler* assembler,
// We do not require an Or with earlier feedback here because once we
// convert the value to a number, we cannot reach this path. We can
// only reach this path on the first pass when the feedback is kNone.
assembler->Assert(assembler->Word32Equal(
var_type_feedback.value(),
assembler->Int32Constant(BinaryOperationFeedback::kNone)));
CSA_ASSERT(assembler,
assembler->Word32Equal(var_type_feedback.value(),
assembler->Int32Constant(
BinaryOperationFeedback::kNone)));
Label if_valueisoddball(assembler), if_valuenotoddball(assembler);
Node* instance_type = assembler->LoadMapInstanceType(value_map);
......@@ -1893,9 +1894,10 @@ compiler::Node* DecStub::Generate(CodeStubAssembler* assembler,
// We do not require an Or with earlier feedback here because once we
// convert the value to a number, we cannot reach this path. We can
// only reach this path on the first pass when the feedback is kNone.
assembler->Assert(assembler->Word32Equal(
var_type_feedback.value(),
assembler->Int32Constant(BinaryOperationFeedback::kNone)));
CSA_ASSERT(assembler,
assembler->Word32Equal(var_type_feedback.value(),
assembler->Int32Constant(
BinaryOperationFeedback::kNone)));
Label if_valueisoddball(assembler), if_valuenotoddball(assembler);
Node* instance_type = assembler->LoadMapInstanceType(value_map);
......@@ -2571,12 +2573,13 @@ compiler::Node* FastNewClosureStub::Generate(CodeStubAssembler* assembler,
if (FLAG_debug_code) {
// Function must be a function without a prototype.
assembler->Assert(assembler->Word32And(
compiler_hints,
assembler->Int32Constant((FunctionKind::kAccessorFunction |
FunctionKind::kArrowFunction |
FunctionKind::kConciseMethod)
<< SharedFunctionInfo::kFunctionKindShift)));
CSA_ASSERT(assembler, assembler->Word32And(
compiler_hints,
assembler->Int32Constant(
(FunctionKind::kAccessorFunction |
FunctionKind::kArrowFunction |
FunctionKind::kConciseMethod)
<< SharedFunctionInfo::kFunctionKindShift)));
}
assembler->Goto(&if_function_without_prototype);
......
......@@ -1159,8 +1159,9 @@ Node* InterpreterAssembler::TruncateTaggedToWord32WithFeedback(
// We do not require an Or with earlier feedback here because once we
// convert the value to a number, we cannot reach this path. We can
// only reach this path on the first pass when the feedback is kNone.
Assert(Word32Equal(var_type_feedback->value(),
Int32Constant(BinaryOperationFeedback::kNone)));
CSA_ASSERT(this,
Word32Equal(var_type_feedback->value(),
Int32Constant(BinaryOperationFeedback::kNone)));
Label if_valueisoddball(this),
if_valueisnotoddball(this, Label::kDeferred);
......
......@@ -1925,9 +1925,12 @@ TEST(Arguments) {
CodeStubArguments arguments(&m, m.IntPtrConstant(3));
m.Assert(m.WordEqual(arguments.AtIndex(0), m.SmiConstant(Smi::FromInt(12))));
m.Assert(m.WordEqual(arguments.AtIndex(1), m.SmiConstant(Smi::FromInt(13))));
m.Assert(m.WordEqual(arguments.AtIndex(2), m.SmiConstant(Smi::FromInt(14))));
CSA_ASSERT(
&m, m.WordEqual(arguments.AtIndex(0), m.SmiConstant(Smi::FromInt(12))));
CSA_ASSERT(
&m, m.WordEqual(arguments.AtIndex(1), m.SmiConstant(Smi::FromInt(13))));
CSA_ASSERT(
&m, m.WordEqual(arguments.AtIndex(2), m.SmiConstant(Smi::FromInt(14))));
m.Return(arguments.GetReceiver());
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment