Commit 4acb492e authored by bmeurer, committed by Commit bot

[compiler] Introduce initial StrictEqualStub.

Initial version of a new StrictEqualStub, written as a TurboFan code stub,
which implements the full strict equality comparison and is usable for
both TurboFan and Ignition (and soon for the generic CompareIC case
too). The stub is not fully optimized yet: we still go to the runtime
for string comparisons, but that will be addressed in a follow-up CL.

R=yangguo@chromium.org

Review URL: https://codereview.chromium.org/1753173003

Cr-Commit-Position: refs/heads/master@{#34423}
parent 44e9622a
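
For orientation: the stub implements the strict-equality semantics spelled out in the pseudo-code inside StrictEqualStub::GenerateAssembly below — reference-equal inputs are equal unless they are a NaN HeapNumber, Smis and HeapNumbers compare by numeric value, strings compare by content (still via the runtime for now), and everything else requires reference identity. A minimal standalone C++ sketch of the NaN corner case (illustrative only, not V8 code):

    #include <cmath>

    // Strict equality on two unboxed doubles: NaN is never equal to anything,
    // not even to itself, which is why the stub re-checks reference-equal
    // HeapNumbers for NaN.
    bool StrictEqualDoubles(double lhs, double rhs) {
      if (std::isnan(lhs) || std::isnan(rhs)) return false;
      return lhs == rhs;
    }
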
@@ -3179,10 +3179,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   // Handle more complex cases in runtime.
   __ bind(&runtime);
-  __ Push(left, right);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals);
+    {
+      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+      __ Push(left, right);
+      __ CallRuntime(Runtime::kStringEqual);
+    }
+    __ LoadRoot(r1, Heap::kTrueValueRootIndex);
+    __ sub(r0, r0, r1);
+    __ Ret();
   } else {
+    __ Push(left, right);
     __ TailCallRuntime(Runtime::kStringCompare);
   }
......
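
The same pattern repeats for every architecture below (arm64, ia32, mips, mips64, x64): the equality case now calls Runtime::kStringEqual inside an internal frame, which returns the canonical true/false heap object, and then subtracts the true-value root from the result register. Since the CompareIC contract is that zero signals equality (the convention the removed Runtime_StringEquals comment spelled out), the subtraction yields 0 exactly when the strings were equal. A rough C++ illustration of that final step (illustrative only; the names are made up):

    #include <cstdint>

    // result holds the address of either the true or the false root object.
    // Subtracting the true root leaves 0 iff the runtime returned true.
    intptr_t ToCompareICResult(intptr_t result, intptr_t true_root) {
      return result - true_root;  // 0 means "equal" to the CompareIC caller
    }
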
@@ -2868,10 +2868,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   // Handle more complex cases in runtime.
   __ Bind(&runtime);
-  __ Push(lhs, rhs);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals);
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(lhs, rhs);
+      __ CallRuntime(Runtime::kStringEqual);
+    }
+    __ LoadRoot(x1, Heap::kTrueValueRootIndex);
+    __ Sub(x0, x0, x1);
+    __ Ret();
   } else {
+    __ Push(lhs, rhs);
     __ TailCallRuntime(Runtime::kStringCompare);
   }
......
@@ -194,6 +194,11 @@ Callable CodeFactory::RegExpExec(Isolate* isolate) {
   return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
 }
 
+// static
+Callable CodeFactory::StrictEqual(Isolate* isolate) {
+  StrictEqualStub stub(isolate);
+  return Callable(stub.GetCode(), stub.GetCallInterfaceDescriptor());
+}
+
 // static
 Callable CodeFactory::StringAdd(Isolate* isolate, StringAddFlags flags,
......
@@ -75,6 +75,8 @@ class CodeFactory final {
   static Callable RegExpConstructResult(Isolate* isolate);
   static Callable RegExpExec(Isolate* isolate);
 
+  static Callable StrictEqual(Isolate* isolate);
+
   static Callable StringAdd(Isolate* isolate, StringAddFlags flags,
                             PretenureFlag pretenure_flag);
   static Callable StringCompare(Isolate* isolate);
......
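
CodeFactory::StrictEqual packages the stub's code object and its Compare call descriptor into a Callable, so TurboFan lowering and the Ignition interpreter can invoke the same code. A sketch of the call-site pattern, mirroring the interpreter change further down (lhs, rhs and context stand for whatever nodes the caller already has):

    Callable callable = CodeFactory::StrictEqual(isolate);
    Node* target = assembler->HeapConstant(callable.code());
    Node* result = assembler->CallStub(callable.descriptor(), target, context,
                                       lhs, rhs);
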
@@ -473,6 +473,290 @@ void StringLengthStub::GenerateAssembly(
   assembler->Return(result);
 }
 
+void StrictEqualStub::GenerateAssembly(
+    compiler::CodeStubAssembler* assembler) const {
+  // Here's pseudo-code for the algorithm below:
+  //
+  // if (lhs == rhs) {
+  //   if (lhs->IsHeapNumber()) return HeapNumber::cast(lhs)->value() != NaN;
+  //   return true;
+  // }
+  // if (!lhs->IsSmi()) {
+  //   if (lhs->IsHeapNumber()) {
+  //     if (rhs->IsSmi()) {
+  //       return Smi::cast(rhs)->value() == HeapNumber::cast(lhs)->value();
+  //     } else if (rhs->IsHeapNumber()) {
+  //       return HeapNumber::cast(rhs)->value() ==
+  //              HeapNumber::cast(lhs)->value();
+  //     } else {
+  //       return false;
+  //     }
+  //   } else {
+  //     if (rhs->IsSmi()) {
+  //       return false;
+  //     } else {
+  //       if (lhs->IsString()) {
+  //         if (rhs->IsString()) {
+  //           return %StringEqual(lhs, rhs);
+  //         } else {
+  //           return false;
+  //         }
+  //       } else if (lhs->IsSimd128()) {
+  //         if (rhs->IsSimd128()) {
+  //           return %StrictEqual(lhs, rhs);
+  //         }
+  //       } else {
+  //         return false;
+  //       }
+  //     }
+  //   }
+  // } else {
+  //   if (rhs->IsSmi()) {
+  //     return false;
+  //   } else {
+  //     if (rhs->IsHeapNumber()) {
+  //       return Smi::cast(lhs)->value() == HeapNumber::cast(rhs)->value();
+  //     } else {
+  //       return false;
+  //     }
+  //   }
+  // }
+
+  typedef compiler::CodeStubAssembler::Label Label;
+  typedef compiler::Node Node;
+
+  Node* lhs = assembler->Parameter(0);
+  Node* rhs = assembler->Parameter(1);
+  Node* context = assembler->Parameter(2);
+
+  Label if_true(assembler), if_false(assembler);
+
+  // Check if {lhs} and {rhs} refer to the same object.
+  Label if_same(assembler), if_notsame(assembler);
+  assembler->Branch(assembler->WordEqual(lhs, rhs), &if_same, &if_notsame);
+
+  assembler->Bind(&if_same);
+  {
+    // The {lhs} and {rhs} reference the exact same value, yet we need special
+    // treatment for HeapNumber, as NaN is not equal to NaN.
+    // TODO(bmeurer): This seems to violate the SIMD.js specification, but it
+    // seems to be what is tested in the current SIMD.js testsuite.
+
+    // Check if {lhs} (and therefore {rhs}) is a Smi or a HeapObject.
+    Label if_lhsissmi(assembler), if_lhsisnotsmi(assembler);
+    assembler->Branch(assembler->WordIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
+
+    assembler->Bind(&if_lhsisnotsmi);
+    {
+      // Load the map of {lhs}.
+      Node* lhs_map = assembler->LoadObjectField(lhs, HeapObject::kMapOffset);
+
+      // Check if {lhs} (and therefore {rhs}) is a HeapNumber.
+      Node* number_map = assembler->HeapNumberMapConstant();
+      Label if_lhsisnumber(assembler), if_lhsisnotnumber(assembler);
+      assembler->Branch(assembler->WordEqual(lhs_map, number_map),
+                        &if_lhsisnumber, &if_lhsisnotnumber);
+
+      assembler->Bind(&if_lhsisnumber);
+      {
+        // Convert {lhs} (and therefore {rhs}) to floating point value.
+        Node* lhs_value = assembler->LoadHeapNumberValue(lhs);
+
+        // Check if the HeapNumber value is a NaN.
+        assembler->BranchIfFloat64IsNaN(lhs_value, &if_false, &if_true);
+      }
+
+      assembler->Bind(&if_lhsisnotnumber);
+      assembler->Goto(&if_true);
+    }
+
+    assembler->Bind(&if_lhsissmi);
+    assembler->Goto(&if_true);
+  }
+
+  assembler->Bind(&if_notsame);
+  {
+    // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
+    // String and Simd128Value they can still be considered equal.
+    Node* number_map = assembler->HeapNumberMapConstant();
+
+    // Check if {lhs} is a Smi or a HeapObject.
+    Label if_lhsissmi(assembler), if_lhsisnotsmi(assembler);
+    assembler->Branch(assembler->WordIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
+
+    assembler->Bind(&if_lhsisnotsmi);
+    {
+      // Load the map of {lhs}.
+      Node* lhs_map = assembler->LoadObjectField(lhs, HeapObject::kMapOffset);
+
+      // Check if {lhs} is a HeapNumber.
+      Label if_lhsisnumber(assembler), if_lhsisnotnumber(assembler);
+      assembler->Branch(assembler->WordEqual(lhs_map, number_map),
+                        &if_lhsisnumber, &if_lhsisnotnumber);
+
+      assembler->Bind(&if_lhsisnumber);
+      {
+        // Check if {rhs} is a Smi or a HeapObject.
+        Label if_rhsissmi(assembler), if_rhsisnotsmi(assembler);
+        assembler->Branch(assembler->WordIsSmi(rhs), &if_rhsissmi,
+                          &if_rhsisnotsmi);
+
+        assembler->Bind(&if_rhsissmi);
+        {
+          // Convert {lhs} and {rhs} to floating point values.
+          Node* lhs_value = assembler->LoadHeapNumberValue(lhs);
+          Node* rhs_value = assembler->SmiToFloat64(rhs);
+
+          // Perform a floating point comparison of {lhs} and {rhs}.
+          assembler->BranchIfFloat64Equal(lhs_value, rhs_value, &if_true,
+                                          &if_false);
+        }
+
+        assembler->Bind(&if_rhsisnotsmi);
+        {
+          // Load the map of {rhs}.
+          Node* rhs_map =
+              assembler->LoadObjectField(rhs, HeapObject::kMapOffset);
+
+          // Check if {rhs} is also a HeapNumber.
+          Label if_rhsisnumber(assembler), if_rhsisnotnumber(assembler);
+          assembler->Branch(assembler->WordEqual(rhs_map, number_map),
+                            &if_rhsisnumber, &if_rhsisnotnumber);
+
+          assembler->Bind(&if_rhsisnumber);
+          {
+            // Convert {lhs} and {rhs} to floating point values.
+            Node* lhs_value = assembler->LoadHeapNumberValue(lhs);
+            Node* rhs_value = assembler->LoadHeapNumberValue(rhs);
+
+            // Perform a floating point comparison of {lhs} and {rhs}.
+            assembler->BranchIfFloat64Equal(lhs_value, rhs_value, &if_true,
+                                            &if_false);
+          }
+
+          assembler->Bind(&if_rhsisnotnumber);
+          assembler->Goto(&if_false);
+        }
+      }
+
+      assembler->Bind(&if_lhsisnotnumber);
+      {
+        // Check if {rhs} is a Smi or a HeapObject.
+        Label if_rhsissmi(assembler), if_rhsisnotsmi(assembler);
+        assembler->Branch(assembler->WordIsSmi(rhs), &if_rhsissmi,
+                          &if_rhsisnotsmi);
+
+        assembler->Bind(&if_rhsissmi);
+        assembler->Goto(&if_false);
+
+        assembler->Bind(&if_rhsisnotsmi);
+        {
+          // Load the instance type of {lhs}.
+          Node* lhs_instance_type = assembler->LoadMapInstanceType(lhs_map);
+
+          // Check if {lhs} is a String.
+          Label if_lhsisstring(assembler), if_lhsisnotstring(assembler);
+          assembler->Branch(assembler->Int32LessThan(
+                                lhs_instance_type,
+                                assembler->Int32Constant(FIRST_NONSTRING_TYPE)),
+                            &if_lhsisstring, &if_lhsisnotstring);
+
+          assembler->Bind(&if_lhsisstring);
+          {
+            // Load the instance type of {rhs}.
+            Node* rhs_instance_type = assembler->LoadInstanceType(rhs);
+
+            // Check if {rhs} is also a String.
+            Label if_rhsisstring(assembler), if_rhsisnotstring(assembler);
+            assembler->Branch(assembler->Int32LessThan(
+                                  rhs_instance_type, assembler->Int32Constant(
+                                                         FIRST_NONSTRING_TYPE)),
+                              &if_rhsisstring, &if_rhsisnotstring);
+
+            assembler->Bind(&if_rhsisstring);
+            {
+              // TODO(bmeurer): Optimize this further once the StringEqual
+              // functionality is available in TurboFan land.
+              assembler->TailCallRuntime(Runtime::kStringEqual, context, lhs,
+                                         rhs);
+            }
+
+            assembler->Bind(&if_rhsisnotstring);
+            assembler->Goto(&if_false);
+          }
+
+          assembler->Bind(&if_lhsisnotstring);
+          {
+            // Check if {lhs} is a Simd128Value.
+            Label if_lhsissimd128value(assembler),
+                if_lhsisnotsimd128value(assembler);
+            assembler->Branch(assembler->Word32Equal(
+                                  lhs_instance_type,
+                                  assembler->Int32Constant(SIMD128_VALUE_TYPE)),
+                              &if_lhsissimd128value, &if_lhsisnotsimd128value);
+
+            assembler->Bind(&if_lhsissimd128value);
+            {
+              // TODO(bmeurer): Inline the Simd128Value equality check.
+              assembler->TailCallRuntime(Runtime::kStrictEqual, context, lhs,
+                                         rhs);
+            }
+
+            assembler->Bind(&if_lhsisnotsimd128value);
+            assembler->Goto(&if_false);
+          }
+        }
+      }
+    }
+
+    assembler->Bind(&if_lhsissmi);
+    {
+      // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
+      // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
+      // HeapNumber with an equal floating point value.
+
+      // Check if {rhs} is a Smi or a HeapObject.
+      Label if_rhsissmi(assembler), if_rhsisnotsmi(assembler);
+      assembler->Branch(assembler->WordIsSmi(rhs), &if_rhsissmi,
+                        &if_rhsisnotsmi);
+
+      assembler->Bind(&if_rhsissmi);
+      assembler->Goto(&if_false);
+
+      assembler->Bind(&if_rhsisnotsmi);
+      {
+        // Load the map of the {rhs}.
+        Node* rhs_map = assembler->LoadObjectField(rhs, HeapObject::kMapOffset);
+
+        // The {rhs} could be a HeapNumber with the same value as {lhs}.
+        Label if_rhsisnumber(assembler), if_rhsisnotnumber(assembler);
+        assembler->Branch(assembler->WordEqual(rhs_map, number_map),
+                          &if_rhsisnumber, &if_rhsisnotnumber);
+
+        assembler->Bind(&if_rhsisnumber);
+        {
+          // Convert {lhs} and {rhs} to floating point values.
+          Node* lhs_value = assembler->SmiToFloat64(lhs);
+          Node* rhs_value = assembler->LoadHeapNumberValue(rhs);
+
+          // Perform a floating point comparison of {lhs} and {rhs}.
+          assembler->BranchIfFloat64Equal(lhs_value, rhs_value, &if_true,
+                                          &if_false);
+        }
+
+        assembler->Bind(&if_rhsisnotnumber);
+        assembler->Goto(&if_false);
+      }
+    }
+  }
+
+  assembler->Bind(&if_true);
+  assembler->Return(assembler->BooleanConstant(true));
+
+  assembler->Bind(&if_false);
+  assembler->Return(assembler->BooleanConstant(false));
+}
+
 void ToBooleanStub::GenerateAssembly(
     compiler::CodeStubAssembler* assembler) const {
   typedef compiler::Node Node;
......
@@ -101,6 +101,7 @@ namespace internal {
   V(LoadIC)                      \
   /* TurboFanCodeStubs */        \
   V(StringLength)                \
+  V(StrictEqual)                 \
   V(ToBoolean)                   \
   /* IC Handler stubs */         \
   V(ArrayBufferViewLoadField)    \
@@ -631,6 +632,16 @@ class StringLengthStub : public TurboFanCodeStub {
   DEFINE_CODE_STUB(StringLength, TurboFanCodeStub);
 };
 
+class StrictEqualStub final : public TurboFanCodeStub {
+ public:
+  explicit StrictEqualStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
+
+  void GenerateAssembly(compiler::CodeStubAssembler* assembler) const final;
+
+  DEFINE_CALL_INTERFACE_DESCRIPTOR(Compare);
+  DEFINE_CODE_STUB(StrictEqual, TurboFanCodeStub);
+};
+
 class ToBooleanStub final : public TurboFanCodeStub {
  public:
   explicit ToBooleanStub(Isolate* isolate) : TurboFanCodeStub(isolate) {}
......
@@ -105,6 +105,10 @@ Node* CodeStubAssembler::Float64Constant(double value) {
   return raw_assembler_->Float64Constant(value);
 }
 
+Node* CodeStubAssembler::HeapNumberMapConstant() {
+  return HeapConstant(isolate()->factory()->heap_number_map());
+}
+
 Node* CodeStubAssembler::Parameter(int value) {
   return raw_assembler_->Parameter(value);
 }
@@ -139,7 +143,6 @@ Node* CodeStubAssembler::SmiTag(Node* value) {
   return raw_assembler_->WordShl(value, SmiShiftBitsConstant());
 }
 
-
 Node* CodeStubAssembler::SmiUntag(Node* value) {
   return raw_assembler_->WordSar(value, SmiShiftBitsConstant());
 }
@@ -152,6 +155,10 @@ Node* CodeStubAssembler::SmiToInt32(Node* value) {
   return result;
 }
 
+Node* CodeStubAssembler::SmiToFloat64(Node* value) {
+  return ChangeInt32ToFloat64(SmiUntag(value));
+}
+
 Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) { return IntPtrAdd(a, b); }
 
 Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) { return WordEqual(a, b); }
@@ -188,11 +195,15 @@ Node* CodeStubAssembler::LoadObjectField(Node* object, int offset) {
 }
 
 Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
-  return raw_assembler_->Load(
-      MachineType::Float64(), object,
-      IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
+  return Load(MachineType::Float64(), object,
+              IntPtrConstant(HeapNumber::kValueOffset - kHeapObjectTag));
+}
+
+Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
+  return Load(MachineType::Uint8(), map,
+              IntPtrConstant(Map::kInstanceTypeOffset - kHeapObjectTag));
 }
 
 Node* CodeStubAssembler::LoadFixedArrayElementSmiIndex(Node* object,
                                                        Node* smi_index,
                                                        int additional_offset) {
@@ -279,10 +290,7 @@ Node* CodeStubAssembler::Projection(int index, Node* value) {
 }
 
 Node* CodeStubAssembler::LoadInstanceType(Node* object) {
-  return raw_assembler_->Word32And(
-      LoadObjectField(LoadObjectField(object, HeapObject::kMapOffset),
-                      Map::kInstanceTypeOffset),
-      raw_assembler_->Int32Constant(255));
+  return LoadMapInstanceType(LoadObjectField(object, HeapObject::kMapOffset));
 }
 
 Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
@@ -292,6 +300,16 @@ Node* CodeStubAssembler::BitFieldDecode(Node* word32, uint32_t shift,
                                    raw_assembler_->Int32Constant(shift));
 }
 
+void CodeStubAssembler::BranchIfFloat64Equal(Node* a, Node* b, Label* if_true,
+                                             Label* if_false) {
+  Label if_equal(this), if_notequal(this);
+  Branch(Float64Equal(a, b), &if_equal, &if_notequal);
+  Bind(&if_equal);
+  Goto(if_true);
+  Bind(&if_notequal);
+  Goto(if_false);
+}
+
 Node* CodeStubAssembler::CallN(CallDescriptor* descriptor, Node* code_target,
                                Node** args) {
   CallPrologue();
@@ -468,12 +486,20 @@ Node* CodeStubAssembler::CallStub(const CallInterfaceDescriptor& descriptor,
   return CallN(call_descriptor, target, args);
 }
 
-Node* CodeStubAssembler::TailCallStub(CodeStub& stub, Node** args) {
-  Node* code_target = HeapConstant(stub.GetCode());
-  CallDescriptor* descriptor = Linkage::GetStubCallDescriptor(
-      isolate(), zone(), stub.GetCallInterfaceDescriptor(),
-      stub.GetStackParameterCount(), CallDescriptor::kSupportsTailCalls);
-  return raw_assembler_->TailCallN(descriptor, code_target, args);
+Node* CodeStubAssembler::TailCallStub(const CallInterfaceDescriptor& descriptor,
+                                      Node* target, Node* context, Node* arg1,
+                                      Node* arg2, size_t result_size) {
+  CallDescriptor* call_descriptor = Linkage::GetStubCallDescriptor(
+      isolate(), zone(), descriptor, descriptor.GetStackParameterCount(),
+      CallDescriptor::kSupportsTailCalls, Operator::kNoProperties,
+      MachineType::AnyTagged(), result_size);
+
+  Node** args = zone()->NewArray<Node*>(3);
+  args[0] = arg1;
+  args[1] = arg2;
+  args[2] = context;
+
+  return raw_assembler_->TailCallN(call_descriptor, target, args);
 }
 
 Node* CodeStubAssembler::TailCall(
@@ -517,11 +543,15 @@ void CodeStubAssembler::Switch(Node* index, Label* default_label,
 }
 
 // RawMachineAssembler delegate helpers:
-Isolate* CodeStubAssembler::isolate() { return raw_assembler_->isolate(); }
+Isolate* CodeStubAssembler::isolate() const {
+  return raw_assembler_->isolate();
+}
+
+Factory* CodeStubAssembler::factory() const { return isolate()->factory(); }
 
-Graph* CodeStubAssembler::graph() { return raw_assembler_->graph(); }
+Graph* CodeStubAssembler::graph() const { return raw_assembler_->graph(); }
 
-Zone* CodeStubAssembler::zone() { return raw_assembler_->zone(); }
+Zone* CodeStubAssembler::zone() const { return raw_assembler_->zone(); }
 
 // The core implementation of Variable is stored through an indirection so
 // that it can outlive the often block-scoped Variable declarations. This is
......
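
The reworked TailCallStub no longer takes a CodeStub reference; it takes a call descriptor plus an explicit target, context and two arguments, and packs them as args[0] = arg1, args[1] = arg2, args[2] = context before delegating to RawMachineAssembler::TailCallN. A hypothetical call site under that signature (not exercised in this CL; lhs, rhs and context are assumed to be in scope):

    Callable callable = CodeFactory::StrictEqual(isolate);
    Node* target = assembler->HeapConstant(callable.code());
    // Tail-call the stub so its result becomes this code object's result.
    assembler->TailCallStub(callable.descriptor(), target, context, lhs, rhs);
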
@@ -21,6 +21,7 @@ namespace internal {
 
 class CallInterfaceDescriptor;
 class Isolate;
+class Factory;
 class Zone;
 
 namespace compiler {
@@ -74,6 +75,7 @@ class Schedule;
 #define CODE_STUB_ASSEMBLER_UNARY_OP_LIST(V) \
   V(ChangeFloat64ToUint32)                   \
+  V(ChangeInt32ToFloat64)                    \
   V(ChangeInt32ToInt64)                      \
   V(ChangeUint32ToFloat64)                   \
   V(ChangeUint32ToUint64)
@@ -124,6 +126,7 @@ class CodeStubAssembler {
   Node* BooleanConstant(bool value);
   Node* ExternalConstant(ExternalReference address);
   Node* Float64Constant(double value);
+  Node* HeapNumberMapConstant();
   Node* Parameter(int value);
   void Return(Node* value);
@@ -204,7 +207,10 @@ class CodeStubAssembler {
                  Node* context, Node* arg1, Node* arg2, Node* arg3, Node* arg4,
                  Node* arg5, size_t result_size = 1);
 
-  Node* TailCallStub(CodeStub& stub, Node** args);
+  Node* TailCallStub(const CallInterfaceDescriptor& descriptor, Node* target,
+                     Node* context, Node* arg1, Node* arg2,
+                     size_t result_size = 1);
+
   Node* TailCall(const CallInterfaceDescriptor& descriptor, Node* target,
                  Node** args, size_t result_size = 1);
@@ -216,7 +222,9 @@ class CodeStubAssembler {
   Node* SmiTag(Node* value);
   // Untag a Smi value as a Word.
   Node* SmiUntag(Node* value);
-  // Untag an Smi value as a 32-bit value.
+
+  // Smi conversions.
+  Node* SmiToFloat64(Node* value);
   Node* SmiToInt32(Node* value);
 
   // Smi operations.
@@ -233,8 +241,10 @@ class CodeStubAssembler {
   Node* LoadBufferObject(Node* buffer, int offset);
   // Load a field from an object on the heap.
   Node* LoadObjectField(Node* object, int offset);
-  // Load the HeapNumber value from a HeapNumber object.
+  // Load the floating point value of a HeapNumber.
   Node* LoadHeapNumberValue(Node* object);
+  // Load the instance type of a Map.
+  Node* LoadMapInstanceType(Node* map);
   // Load an array element from a FixedArray.
   Node* LoadFixedArrayElementSmiIndex(Node* object, Node* smi_index,
@@ -254,11 +264,19 @@ class CodeStubAssembler {
   Node* BitFieldDecode(Node* word32, uint32_t shift, uint32_t mask);
 
+  // Branching helpers.
+  // TODO(danno): Can we be more cleverish wrt. edge-split?
+  void BranchIfFloat64Equal(Node* a, Node* b, Label* if_true, Label* if_false);
+  void BranchIfFloat64IsNaN(Node* value, Label* if_true, Label* if_false) {
+    BranchIfFloat64Equal(value, value, if_false, if_true);
+  }
+
  protected:
   // Protected helpers which delegate to RawMachineAssembler.
-  Graph* graph();
-  Isolate* isolate();
-  Zone* zone();
+  Graph* graph() const;
+  Factory* factory() const;
+  Isolate* isolate() const;
+  Zone* zone() const;
 
   // Enables subclasses to perform operations before and after a call.
   virtual void CallPrologue();
......
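
BranchIfFloat64IsNaN, declared above, relies on the IEEE 754 rule that NaN compares unequal to every value including itself: comparing the value against itself and swapping the true/false targets is all that is needed. The plain C++ equivalent of that self-comparison trick:

    // True exactly for NaN: NaN is the only double for which v == v is false.
    bool IsFloat64NaN(double value) { return !(value == value); }
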
@@ -89,7 +89,6 @@ REPLACE_BINARY_OP_IC_CALL(JSModulus, Token::MOD)
 }
 
 REPLACE_RUNTIME_CALL(JSEqual, Runtime::kEqual)
 REPLACE_RUNTIME_CALL(JSNotEqual, Runtime::kNotEqual)
-REPLACE_RUNTIME_CALL(JSStrictEqual, Runtime::kStrictEqual)
 REPLACE_RUNTIME_CALL(JSStrictNotEqual, Runtime::kStrictNotEqual)
 REPLACE_RUNTIME_CALL(JSLessThan, Runtime::kLessThan)
 REPLACE_RUNTIME_CALL(JSGreaterThan, Runtime::kGreaterThan)
@@ -100,6 +99,15 @@ REPLACE_RUNTIME_CALL(JSCreateModuleContext, Runtime::kPushModuleContext)
 REPLACE_RUNTIME_CALL(JSConvertReceiver, Runtime::kConvertReceiver)
 #undef REPLACE_RUNTIME_CALL
 
+#define REPLACE_STUB_CALL(Op, Stub)                                \
+  void JSGenericLowering::Lower##Op(Node* node) {                  \
+    CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);  \
+    Callable callable = CodeFactory::Stub(isolate());              \
+    ReplaceWithStubCall(node, callable, flags);                    \
+  }
+REPLACE_STUB_CALL(JSStrictEqual, StrictEqual)
+#undef REPLACE_STUB_CALL
+
 void JSGenericLowering::ReplaceWithStubCall(Node* node, Callable callable,
                                             CallDescriptor::Flags flags) {
   Operator::Properties properties = node->op()->properties();
......
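
With the REPLACE_RUNTIME_CALL entry for JSStrictEqual removed and the new REPLACE_STUB_CALL macro instantiated once, JSStrictEqual is now lowered to a call of the TurboFan stub instead of Runtime::kStrictEqual. Expanding the macro by hand, the generated lowering function is roughly:

    void JSGenericLowering::LowerJSStrictEqual(Node* node) {
      CallDescriptor::Flags flags = AdjustFrameStatesForCall(node);
      Callable callable = CodeFactory::StrictEqual(isolate());
      ReplaceWithStubCall(node, callable, flags);
    }
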
@@ -160,7 +160,7 @@ int Linkage::FrameStateInputCount(Runtime::FunctionId function) {
     case Runtime::kPushCatchContext:
     case Runtime::kReThrow:
     case Runtime::kStringCompare:
-    case Runtime::kStringEquals:
+    case Runtime::kStringEqual:
     case Runtime::kToFastProperties:  // TODO(jarin): Is it safe?
     case Runtime::kTraceEnter:
     case Runtime::kTraceExit:
......
@@ -3232,13 +3232,20 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   // Handle more complex cases in runtime.
   __ bind(&runtime);
-  __ pop(tmp1);  // Return address.
-  __ push(left);
-  __ push(right);
-  __ push(tmp1);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals);
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(left);
+      __ Push(right);
+      __ CallRuntime(Runtime::kStringEqual);
+    }
+    __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
+    __ Ret();
   } else {
+    __ pop(tmp1);  // Return address.
+    __ push(left);
+    __ push(right);
+    __ push(tmp1);
     __ TailCallRuntime(Runtime::kStringCompare);
   }
......
@@ -744,6 +744,20 @@ void Interpreter::DoPopContext(InterpreterAssembler* assembler) {
   __ Dispatch();
 }
 
+void Interpreter::DoBinaryOp(Callable callable,
+                             InterpreterAssembler* assembler) {
+  // TODO(bmeurer): Collect definition side type feedback for various
+  // binary operations.
+  Node* target = __ HeapConstant(callable.code());
+  Node* reg_index = __ BytecodeOperandReg(0);
+  Node* lhs = __ LoadRegister(reg_index);
+  Node* rhs = __ GetAccumulator();
+  Node* context = __ GetContext();
+  Node* result = __ CallStub(callable.descriptor(), target, context, lhs, rhs);
+  __ SetAccumulator(result);
+  __ Dispatch();
+}
+
 void Interpreter::DoBinaryOp(Runtime::FunctionId function_id,
                              InterpreterAssembler* assembler) {
   // TODO(rmcilroy): Call ICs which back-patch bytecode with type specialized
@@ -1174,7 +1188,7 @@ void Interpreter::DoTestNotEqual(InterpreterAssembler* assembler) {
 //
 // Test if the value in the <src> register is strictly equal to the accumulator.
 void Interpreter::DoTestEqualStrict(InterpreterAssembler* assembler) {
-  DoBinaryOp(Runtime::kStrictEqual, assembler);
+  DoBinaryOp(CodeFactory::StrictEqual(isolate_), assembler);
 }
......
@@ -58,6 +58,9 @@ class Interpreter {
   BYTECODE_LIST(DECLARE_BYTECODE_HANDLER_GENERATOR)
 #undef DECLARE_BYTECODE_HANDLER_GENERATOR
 
+  // Generates code to perform the binary operations via |callable|.
+  void DoBinaryOp(Callable callable, InterpreterAssembler* assembler);
+
   // Generates code to perform the binary operations via |function_id|.
   void DoBinaryOp(Runtime::FunctionId function_id,
                   InterpreterAssembler* assembler);
......
@@ -3358,10 +3358,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   // Handle more complex cases in runtime.
   __ bind(&runtime);
-  __ Push(left, right);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals);
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(left, right);
+      __ CallRuntime(Runtime::kStringEqual);
+    }
+    __ LoadRoot(a0, Heap::kTrueValueRootIndex);
+    __ Ret(USE_DELAY_SLOT);
+    __ Subu(v0, v0, a0);  // In delay slot.
   } else {
+    __ Push(left, right);
     __ TailCallRuntime(Runtime::kStringCompare);
   }
......
@@ -3362,10 +3362,17 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   // Handle more complex cases in runtime.
   __ bind(&runtime);
-  __ Push(left, right);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals);
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(left, right);
+      __ CallRuntime(Runtime::kStringEqual);
+    }
+    __ LoadRoot(a0, Heap::kTrueValueRootIndex);
+    __ Ret(USE_DELAY_SLOT);
+    __ Subu(v0, v0, a0);  // In delay slot.
   } else {
+    __ Push(left, right);
     __ TailCallRuntime(Runtime::kStringCompare);
   }
......
@@ -1145,22 +1145,12 @@ RUNTIME_FUNCTION(Runtime_NewString) {
   return *result;
 }
 
-RUNTIME_FUNCTION(Runtime_StringEquals) {
+RUNTIME_FUNCTION(Runtime_StringEqual) {
   HandleScope handle_scope(isolate);
-  DCHECK(args.length() == 2);
+  DCHECK_EQ(2, args.length());
   CONVERT_ARG_HANDLE_CHECKED(String, x, 0);
   CONVERT_ARG_HANDLE_CHECKED(String, y, 1);
-
-  bool not_equal = !String::Equals(x, y);
-  // This is slightly convoluted because the value that signifies
-  // equality is 0 and inequality is 1 so we have to negate the result
-  // from String::Equals.
-  DCHECK(not_equal == 0 || not_equal == 1);
-  STATIC_ASSERT(EQUAL == 0);
-  STATIC_ASSERT(NOT_EQUAL == 1);
-
-  return Smi::FromInt(not_equal);
+  return isolate->heap()->ToBoolean(String::Equals(x, y));
 }
......
@@ -842,7 +842,6 @@ namespace internal {
   F(Bool8x16Equal, 2, 1) \
   F(Bool8x16NotEqual, 2, 1)
 
-
 #define FOR_EACH_INTRINSIC_STRINGS(F) \
   F(StringReplaceOneCharWithString, 3, 1) \
   F(StringIndexOf, 3, 1) \
@@ -863,7 +862,7 @@ namespace internal {
   F(StringTrim, 3, 1) \
   F(TruncateString, 2, 1) \
   F(NewString, 2, 1) \
-  F(StringEquals, 2, 1) \
+  F(StringEqual, 2, 1) \
   F(FlattenString, 1, 1) \
   F(StringCharFromCode, 1, 1) \
   F(StringCharAt, 2, 1) \
@@ -873,7 +872,6 @@ namespace internal {
   F(TwoByteSeqStringSetChar, 3, 1) \
   F(StringCharCodeAt, 2, 1)
 
-
 #define FOR_EACH_INTRINSIC_SYMBOL(F) \
   F(CreateSymbol, 1, 1) \
   F(CreatePrivateSymbol, 1, 1) \
......
@@ -3172,13 +3172,21 @@ void CompareICStub::GenerateStrings(MacroAssembler* masm) {
   // Handle more complex cases in runtime.
   __ bind(&runtime);
-  __ PopReturnAddressTo(tmp1);
-  __ Push(left);
-  __ Push(right);
-  __ PushReturnAddressFrom(tmp1);
   if (equality) {
-    __ TailCallRuntime(Runtime::kStringEquals);
+    {
+      FrameScope scope(masm, StackFrame::INTERNAL);
+      __ Push(left);
+      __ Push(right);
+      __ CallRuntime(Runtime::kStringEqual);
+    }
+    __ LoadRoot(rdx, Heap::kTrueValueRootIndex);
+    __ subp(rax, rdx);
+    __ Ret();
   } else {
+    __ PopReturnAddressTo(tmp1);
+    __ Push(left);
+    __ Push(right);
+    __ PushReturnAddressFrom(tmp1);
     __ TailCallRuntime(Runtime::kStringCompare);
   }
......