Commit 4acb492e authored by bmeurer's avatar bmeurer Committed by Commit bot

[compiler] Introduce initial StrictEqualStub.

Initial version of a new StrictEqualStub written as TurboFan code stub,
that implements the full strict equality comparison and is usable for
both TurboFan and Ignition (and soon for the generic CompareIC case
too). The stub is not fully optimized yet, i.e. we still go to the
runtime for string comparisons, but that'll be addressed in a follow-up
CL.

R=yangguo@chromium.org

Review URL: https://codereview.chromium.org/1753173003

Cr-Commit-Position: refs/heads/master@{#34423}
parent 44e9622a
......@@ -3179,10 +3179,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ bind(&runtime);
__ Push(left, right);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(left, right);
__ CallRuntime(Runtime::kStringEqual);
}
__ LoadRoot(r1, Heap::kTrueValueRootIndex);
__ sub(r0, r0, r1);
__ Ret();
} else {
__ Push(left, right);
__ TailCallRuntime(Runtime::kStringCompare);
}
......
......@@ -2868,10 +2868,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ Bind(&runtime);
__ Push(lhs, rhs);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(lhs, rhs);
__ CallRuntime(Runtime::kStringEqual);
}
__ LoadRoot(x1, Heap::kTrueValueRootIndex);
__ Sub(x0, x0, x1);
__ Ret();
} else {
__ Push(lhs, rhs);
__ TailCallRuntime(Runtime::kStringCompare);
}
......
......@@ -194,6 +194,11 @@ Callable CodeFactory::RegExpExec(Isolate* isolate) {
return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
}
// static
// Returns a Callable wrapping the TurboFan StrictEqualStub, pairing its
// generated code object with the descriptor callers use to pass arguments.
Callable CodeFactory::StrictEqual(Isolate* isolate) {
  StrictEqualStub strict_equal_stub(isolate);
  return Callable(strict_equal_stub.GetCode(),
                  strict_equal_stub.GetCallInterfaceDescriptor());
}
// static
Callable CodeFactory::StringAdd(Isolate* isolate, StringAddFlags flags,
......
......@@ -75,6 +75,8 @@ class CodeFactory final {
static Callable RegExpConstructResult(Isolate* isolate);
static Callable RegExpExec(Isolate* isolate);
static Callable StrictEqual(Isolate* isolate);
static Callable StringAdd(Isolate* isolate, StringAddFlags flags,
PretenureFlag pretenure_flag);
static Callable StringCompare(Isolate* isolate);
......
This diff is collapsed.
......@@ -101,6 +101,7 @@ namespace internal {
V(LoadIC) \
/* TurboFanCodeStubs */ \
V(StringLength) \
V(StrictEqual) \
V(ToBoolean) \
/* IC Handler stubs */ \
V(ArrayBufferViewLoadField) \
......@@ -631,6 +632,16 @@ class StringLengthStub : public TurboFanCodeStub {
DEFINE_CODE_STUB(StringLength, TurboFanCodeStub);
};
// TurboFan code stub implementing the strict equality comparison (===).
// Per the commit description it is usable from both TurboFan and Ignition;
// string comparisons still fall back to the runtime (Runtime::kStringEqual).
class StrictEqualStub final : public TurboFanCodeStub {
 public:
  explicit StrictEqualStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}

  // Emits the stub's body via the CodeStubAssembler.
  void GenerateAssembly(compiler::CodeStubAssembler* assembler) const final;

  // Takes two operands plus context, like other comparison stubs.
  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
  DEFINE_CODE_STUB(StrictEqual, TurboFanCodeStub);
};
class ToBooleanStub final : public TurboFanCodeStub {
public:
explicit ToBooleanStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
......
......@@ -105,6 +105,10 @@ Node* CodeStubAssembler::Float64Constant(double value) {
return raw_assembler_->Float64Constant(value);
}
// Returns a constant node for the canonical HeapNumber map, fetched from
// the isolate's factory.
Node* CodeStubAssembler::HeapNumberMapConstant() {
  auto heap_number_map = isolate()->factory()->heap_number_map();
  return HeapConstant(heap_number_map);
}
// Returns the graph node for the stub parameter at the given index,
// delegating to the underlying RawMachineAssembler.
Node* CodeStubAssembler::Parameter(int value) {
  return raw_assembler_->Parameter(value);
}
......@@ -139,7 +143,6 @@ Node* CodeStubAssembler::SmiTag(Node* value) {
return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
}
// Untags a Smi by arithmetically shifting out the tag bits (the inverse of
// SmiTag's WordShl), yielding the integer as a word-sized value.
Node* CodeStubAssembler::SmiUntag(Node* value) {
  return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
}
......@@ -152,6 +155,10 @@ Node* CodeStubAssembler::SmiToInt32(Node* value) {
return result;
}
// Converts a tagged Smi to a Float64 value: untag first, then widen the
// integer to a double.
Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  Node* untagged = SmiUntag(value);
  return ChangeInt32ToFloat64(untagged);
}
// Adds two Smis directly on their tagged words; valid because SmiTag shifts
// the payload left, leaving the tag bits zero.
Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }
// Compares two Smis for equality; tagged words are equal exactly when the
// untagged values are.
Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }
......@@ -188,9 +195,13 @@ Node* CodeStubAssembler::LoadObjectField(Node* object, int offset) {
}
Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
return raw_assembler_->Load(
MachineType::Float64(), object,
IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
return Load(MachineType::Float64(), object,
IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
}
// Loads the 8-bit instance type field of a Map. The offset is adjusted by
// kHeapObjectTag because |map| is a tagged pointer.
Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return Load(MachineType::Uint8(), map,
              IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag));
}
Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
......@@ -279,10 +290,7 @@ Node* CodeStubAssembler::Projection(int index, Node* value) {
}
Node* CodeStubAssembler::LoadInstanceType(Node* object) {
return raw_assembler_->Word32And(
LoadObjectField(LoadObjectField(object, HeapObject::kMapOffset),
Map::kInstanceTypeOffset),
raw_assembler_->Int32Constant(255));
return LoadMapInstanceType(LoadObjectField(object, HeapObject::kMapOffset));
}
Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
......@@ -292,6 +300,16 @@ Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
raw_assembler_->Int32Constant(shift));
}
// Branches to |if_true| when the Float64 values |a| and |b| compare equal
// and to |if_false| otherwise, routing through local labels so the caller's
// labels receive a plain Goto.
void CodeStubAssembler::BranchIfFloat64Equal(Node* a, Node* b, Label* if_true,
                                             Label* if_false) {
  Label is_equal(this), is_not_equal(this);
  Branch(Float64Equal(a, b), &is_equal, &is_not_equal);
  Bind(&is_equal);
  Goto(if_true);
  Bind(&is_not_equal);
  Goto(if_false);
}
Node* CodeStubAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
Node** args) {
CallPrologue();
......@@ -468,12 +486,20 @@ Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
return CallN(call_descriptor, target, args);
}
Node* CodeStubAssembler::TailCallStub(CodeStub& stub, Node** args) {
Node* code_target = HeapConstant(stub.GetCode());
CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
isolate(), zone(), stub.GetCallInterfaceDescriptor(),
stub.GetStackParameterCount(), CallDescriptor::kSupportsTailCalls);
return raw_assembler_->TailCallN(descriptor, code_target, args);
Node* CodeStubAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
Node* target, Node* context, Node* arg1,
Node* arg2, size_t result_size) {
CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
MachineType::AnyTagged(), result_size);
Node** args = zone()->NewArray<Node*>(3);
args[0] = arg1;
args[1] = arg2;
args[2] = context;
return raw_assembler_->TailCallN(call_descriptor, target, args);
}
Node* CodeStubAssembler::TailCall(
......@@ -517,11 +543,15 @@ void CodeStubAssembler::Switch(Node* index, Label* default_label,
}
// RawMachineAssembler delegate helpers:
Isolate* CodeStubAssembler::isolate() { return raw_assembler_->isolate(); }
Isolate* CodeStubAssembler::isolate() const {
return raw_assembler_->isolate();
}
Factory* CodeStubAssembler::factory() const { return isolate()->factory(); }
Graph* CodeStubAssembler::graph() { return raw_assembler_->graph(); }
Graph* CodeStubAssembler::graph() const { return raw_assembler_->graph(); }
Zone* CodeStubAssembler::zone() { return raw_assembler_->zone(); }
Zone* CodeStubAssembler::zone() const { return raw_assembler_->zone(); }
// The core implementation of Variable is stored through an indirection so
// that it can outlive the often block-scoped Variable declarations. This is
......
......@@ -21,6 +21,7 @@ namespace internal {
class CallInterfaceDescriptor;
class Isolate;
class Factory;
class Zone;
namespace compiler {
......@@ -74,6 +75,7 @@ class Schedule;
#define CODE_STUB_ASSEMBLER_UNARY_OP_LIST(V) \
V(ChangeFloat64ToUint32) \
V(ChangeInt32ToFloat64) \
V(ChangeInt32ToInt64) \
V(ChangeUint32ToFloat64) \
V(ChangeUint32ToUint64)
......@@ -124,6 +126,7 @@ class CodeStubAssembler {
Node* BooleanConstant(bool value);
Node* ExternalConstant(ExternalReference address);
Node* Float64Constant(double value);
Node* HeapNumberMapConstant();
Node* Parameter(int value);
void Return(Node* value);
......@@ -204,7 +207,10 @@ class CodeStubAssembler {
Node* context, Node* arg1, Node* arg2, Node* arg3, Node* arg4,
Node* arg5, size_t result_size = 1);
Node* TailCallStub(CodeStub& stub, Node** args);
Node* TailCallStub(const CallInterfaceDescriptor& descriptor, Node* target,
Node* context, Node* arg1, Node* arg2,
size_t result_size = 1);
Node* TailCall(const CallInterfaceDescriptor& descriptor, Node* target,
Node** args, size_t result_size = 1);
......@@ -216,7 +222,9 @@ class CodeStubAssembler {
Node* SmiTag(Node* value);
// Untag a Smi value as a Word.
Node* SmiUntag(Node* value);
// Untag an Smi value as a 32-bit value.
// Smi conversions.
Node* SmiToFloat64(Node* value);
Node* SmiToInt32(Node* value);
// Smi operations.
......@@ -233,8 +241,10 @@ class CodeStubAssembler {
Node* LoadBufferObject(Node* buffer, int offset);
// Load a field from an object on the heap.
Node* LoadObjectField(Node* object, int offset);
// Load the HeapNumber value from a HeapNumber object.
// Load the floating point value of a HeapNumber.
Node* LoadHeapNumberValue(Node* object);
// Load the instance type of a Map.
Node* LoadMapInstanceType(Node* map);
// Load an array element from a FixedArray.
Node* LoadFixedArrayElementSmiIndex(Node* object, Node* smi_index,
......@@ -254,11 +264,19 @@ class CodeStubAssembler {
Node* BitFieldDecode(Node* word32, uint32_t shift, uint32_t mask);
// Branching helpers.
// TODO(danno): Can we be more cleverish wrt. edge-split?
void BranchIfFloat64Equal(Node* a, Node* b, Label* if_true, Label* if_false);
// Branches to |if_true| iff |value| is NaN. Exploits the IEEE 754 rule that
// NaN is the only value not equal to itself: |if_false| and |if_true| are
// deliberately swapped so that "value != value" lands on |if_true|.
void BranchIfFloat64IsNaN(Node* value, Label* if_true, Label* if_false) {
  BranchIfFloat64Equal(value, value, if_false, if_true);
}
protected:
// Protected helpers which delegate to RawMachineAssembler.
Graph* graph();
Isolate* isolate();
Zone* zone();
Graph* graph() const;
Factory* factory() const;
Isolate* isolate() const;
Zone* zone() const;
// Enables subclasses to perform operations before and after a call.
virtual void CallPrologue();
......
......@@ -89,7 +89,6 @@ REPLACE_BINARY_OP_IC_CALL(JSModulus, Token::MOD)
}
REPLACE_RUNTIME_CALL(JSEqual, Runtime::kEqual)
REPLACE_RUNTIME_CALL(JSNotEqual, Runtime::kNotEqual)
REPLACE_RUNTIME_CALL(JSStrictEqual, Runtime::kStrictEqual)
REPLACE_RUNTIME_CALL(JSStrictNotEqual, Runtime::kStrictNotEqual)
REPLACE_RUNTIME_CALL(JSLessThan, Runtime::kLessThan)
REPLACE_RUNTIME_CALL(JSGreaterThan, Runtime::kGreaterThan)
......@@ -100,6 +99,15 @@ REPLACE_RUNTIME_CALL(JSCreateModuleContext, Runtime::kPushModuleContext)
REPLACE_RUNTIME_CALL(JSConvertReceiver, Runtime::kConvertReceiver)
#undef REPLACE_RUNTIME_CALL
#define REPLACE_STUB_CALL(Op, Stub) \
void JSGenericLowering::Lower##Op(Node* node) { \
CallDescriptor::Flags flags = AdjustFrameStatesForCall(node); \
Callable callable = CodeFactory::Stub(isolate()); \
ReplaceWithStubCall(node, callable, flags); \
}
REPLACE_STUB_CALL(JSStrictEqual, StrictEqual)
#undef REPLACE_STUB_CALL
void JSGenericLowering::ReplaceWithStubCall(Node* node, Callable callable,
CallDescriptor::Flags flags) {
Operator::Properties properties = node->op()->properties();
......
......@@ -160,7 +160,7 @@ int Linkage::FrameStateInputCount(Runtime::FunctionId function) {
case Runtime::kPushCatchContext:
case Runtime::kReThrow:
case Runtime::kStringCompare:
case Runtime::kStringEquals:
case Runtime::kStringEqual:
case Runtime::kToFastProperties: // TODO(jarin): Is it safe?
case Runtime::kTraceEnter:
case Runtime::kTraceExit:
......
......@@ -3232,13 +3232,20 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ bind(&runtime);
__ pop(tmp1); // Return address.
__ push(left);
__ push(right);
__ push(tmp1);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(left);
__ Push(right);
__ CallRuntime(Runtime::kStringEqual);
}
__ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
__ Ret();
} else {
__ pop(tmp1); // Return address.
__ push(left);
__ push(right);
__ push(tmp1);
__ TailCallRuntime(Runtime::kStringCompare);
}
......
......@@ -744,6 +744,20 @@ void Interpreter::DoPopContext(InterpreterAssembler* assembler) {
__ Dispatch();
}
// Lowers a binary bytecode to a call of |callable|: the left operand comes
// from the register encoded in bytecode operand 0, the right operand from
// the accumulator; the stub's result is written back to the accumulator
// before dispatching to the next bytecode.
void Interpreter::DoBinaryOp(Callable callable,
                             InterpreterAssembler* assembler) {
  // TODO(bmeurer): Collect definition side type feedback for various
  // binary operations.
  Node* stub = __ HeapConstant(callable.code());
  Node* lhs_reg = __ BytecodeOperandReg(0);
  Node* left = __ LoadRegister(lhs_reg);
  Node* right = __ GetAccumulator();
  Node* current_context = __ GetContext();
  Node* value =
      __ CallStub(callable.descriptor(), stub, current_context, left, right);
  __ SetAccumulator(value);
  __ Dispatch();
}
void Interpreter::DoBinaryOp(Runtime::FunctionId function_id,
InterpreterAssembler* assembler) {
// TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized
......@@ -1174,7 +1188,7 @@ void Interpreter::DoTestNotEqual(InterpreterAssembler* assembler) {
//
// Test if the value in the <src> register is strictly equal to the accumulator.
void Interpreter::DoTestEqualStrict(InterpreterAssembler* assembler) {
DoBinaryOp(Runtime::kStrictEqual, assembler);
DoBinaryOp(CodeFactory::StrictEqual(isolate_), assembler);
}
......
......@@ -58,6 +58,9 @@ class Interpreter {
BYTECODE_LIST(DECLARE_BYTECODE_HANDLER_GENERATOR)
#undef DECLARE_BYTECODE_HANDLER_GENERATOR
// Generates code to perform the binary operations via |callable|.
void DoBinaryOp(Callable callable, InterpreterAssembler* assembler);
// Generates code to perform the binary operations via |function_id|.
void DoBinaryOp(Runtime::FunctionId function_id,
InterpreterAssembler* assembler);
......
......@@ -3358,10 +3358,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ bind(&runtime);
__ Push(left, right);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(left, right);
__ CallRuntime(Runtime::kStringEqual);
}
__ LoadRoot(a0, Heap::kTrueValueRootIndex);
__ Ret(USE_DELAY_SLOT);
__ Subu(v0, v0, a0); // In delay slot.
} else {
__ Push(left, right);
__ TailCallRuntime(Runtime::kStringCompare);
}
......
......@@ -3362,10 +3362,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ bind(&runtime);
__ Push(left, right);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(left, right);
__ CallRuntime(Runtime::kStringEqual);
}
__ LoadRoot(a0, Heap::kTrueValueRootIndex);
__ Ret(USE_DELAY_SLOT);
__ Subu(v0, v0, a0); // In delay slot.
} else {
__ Push(left, right);
__ TailCallRuntime(Runtime::kStringCompare);
}
......
......@@ -1145,22 +1145,12 @@ RUNTIME_FUNCTION(Runtime_NewString) {
return *result;
}
RUNTIME_FUNCTION(Runtime_StringEquals) {
RUNTIME_FUNCTION(Runtime_StringEqual) {
HandleScope handle_scope(isolate);
DCHECK(args.length() == 2);
DCHECK_EQ(2, args.length());
CONVERT_ARG_HANDLE_CHECKED(String, x, 0);
CONVERT_ARG_HANDLE_CHECKED(String, y, 1);
bool not_equal = !String::Equals(x, y);
// This is slightly convoluted because the value that signifies
// equality is 0 and inequality is 1 so we have to negate the result
// from String::Equals.
DCHECK(not_equal == 0 || not_equal == 1);
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(NOT_EQUAL == 1);
return Smi::FromInt(not_equal);
return isolate->heap()->ToBoolean(String::Equals(x, y));
}
......
......@@ -842,7 +842,6 @@ namespace internal {
F(Bool8x16Equal, 2, 1) \
F(Bool8x16NotEqual, 2, 1)
#define FOR_EACH_INTRINSIC_STRINGS(F) \
F(StringReplaceOneCharWithString, 3, 1) \
F(StringIndexOf, 3, 1) \
......@@ -863,7 +862,7 @@ namespace internal {
F(StringTrim, 3, 1) \
F(TruncateString, 2, 1) \
F(NewString, 2, 1) \
F(StringEquals, 2, 1) \
F(StringEqual, 2, 1) \
F(FlattenString, 1, 1) \
F(StringCharFromCode, 1, 1) \
F(StringCharAt, 2, 1) \
......@@ -873,7 +872,6 @@ namespace internal {
F(TwoByteSeqStringSetChar, 3, 1) \
F(StringCharCodeAt, 2, 1)
#define FOR_EACH_INTRINSIC_SYMBOL(F) \
F(CreateSymbol, 1, 1) \
F(CreatePrivateSymbol, 1, 1) \
......
......@@ -3172,13 +3172,21 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
// Handle more complex cases in runtime.
__ bind(&runtime);
__ PopReturnAddressTo(tmp1);
__ Push(left);
__ Push(right);
__ PushReturnAddressFrom(tmp1);
if (equality) {
__ TailCallRuntime(Runtime::kStringEquals);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(left);
__ Push(right);
__ CallRuntime(Runtime::kStringEqual);
}
__ LoadRoot(rdx, Heap::kTrueValueRootIndex);
__ subp(rax, rdx);
__ Ret();
} else {
__ PopReturnAddressTo(tmp1);
__ Push(left);
__ Push(right);
__ PushReturnAddressFrom(tmp1);
__ TailCallRuntime(Runtime::kStringCompare);
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment