Commit 7873f35e authored by olivf@chromium.org

Hydrogenisation of binops

BUG=
R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/24072013

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@17052 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 30a6e218
@@ -1968,7 +1968,7 @@ class CountOperation V8_FINAL : public Expression {
virtual KeyedAccessStoreMode GetStoreMode() V8_OVERRIDE {
return store_mode_;
}
TypeInfo type() const { return type_; }
Handle<Type> type() const { return type_; }
BailoutId AssignmentId() const { return assignment_id_; }
@@ -1997,7 +1997,7 @@ class CountOperation V8_FINAL : public Expression {
bool is_monomorphic_ : 1;
KeyedAccessStoreMode store_mode_ : 5; // Windows treats as signed,
// must have extra bit.
TypeInfo type_;
Handle<Type> type_;
Expression* expression_;
int pos_;
@@ -841,6 +841,101 @@ Handle<Code> CompareNilICStub::GenerateCode(Isolate* isolate) {
}
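// Hydrogen graph builder for the BinaryOpStub: emits fast paths according to
// the operand types recorded by the IC and falls back to the generic binary
// operation (or a runtime call) otherwise.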
template <>
HValue* CodeStubGraphBuilder<BinaryOpStub>::BuildCodeInitializedStub() {
BinaryOpStub* stub = casted_stub();
HValue* left = GetParameter(0);
HValue* right = GetParameter(1);
Handle<Type> left_type = stub->GetLeftType(isolate());
Handle<Type> right_type = stub->GetRightType(isolate());
Handle<Type> result_type = stub->GetResultType(isolate());
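// Neither input type may be None here, and unless the stub has side effects
// the result type must be known as well.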
ASSERT(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
(stub->HasSideEffects(isolate()) || !result_type->Is(Type::None())));
HValue* result = NULL;
if (stub->operation() == Token::ADD &&
(left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
!left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
// For the generic add stub, a fast case for String add is performance
// critical.
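// Check at runtime whether the possibly-string operand really is a string;
// if so emit HStringAdd (which still checks the other operand), otherwise
// fall back to the generic binary operation.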
if (left_type->Maybe(Type::String())) {
IfBuilder left_string(this);
left_string.IfNot<HIsSmiAndBranch>(left);
left_string.AndIf<HIsStringAndBranch>(left);
left_string.Then();
Push(Add<HStringAdd>(left, right, STRING_ADD_CHECK_RIGHT));
left_string.Else();
Push(AddInstruction(BuildBinaryOperation(stub->operation(),
left, right, left_type, right_type, result_type,
stub->fixed_right_arg(), true)));
left_string.End();
result = Pop();
} else {
IfBuilder right_string(this);
right_string.IfNot<HIsSmiAndBranch>(right);
right_string.AndIf<HIsStringAndBranch>(right);
right_string.Then();
Push(Add<HStringAdd>(left, right, STRING_ADD_CHECK_LEFT));
right_string.Else();
Push(AddInstruction(BuildBinaryOperation(stub->operation(),
left, right, left_type, right_type, result_type,
stub->fixed_right_arg(), true)));
right_string.End();
result = Pop();
}
} else {
result = AddInstruction(BuildBinaryOperation(stub->operation(),
left, right, left_type, right_type, result_type,
stub->fixed_right_arg(), true));
}
// If we encounter a generic argument, the number conversion is
// observable; thus we cannot afford to bail out after the fact.
if (!stub->HasSideEffects(isolate())) {
if (result_type->Is(Type::Smi())) {
if (stub->operation() == Token::SHR) {
// TODO(olivf) Replace this by a SmiTagU Instruction.
// 0x40000000: this number would convert to a negative value when the
// register is interpreted as signed.
IfBuilder if_of(this);
if_of.IfNot<HCompareNumericAndBranch>(result,
Add<HConstant>(static_cast<int>(0x40000000)), Token::EQ_STRICT);
if_of.Then();
if_of.ElseDeopt("UInt->Smi overflow");
if_of.End();
}
}
result = EnforceNumberType(result, result_type);
}
// Reuse the double box if we are allowed to (i.e., for chained binops).
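// E.g. in a chained expression like (a + b) + c the heap number allocated
// for (a + b) is dead after the outer add and can be overwritten with the
// final result.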
if (stub->CanReuseDoubleBox()) {
HValue* reuse = (stub->mode() == OVERWRITE_LEFT) ? left : right;
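// Only a heap-number operand has a box that can be overwritten in place;
// for a Smi operand the freshly computed result is used as is.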
IfBuilder if_heap_number(this);
if_heap_number.IfNot<HIsSmiAndBranch>(reuse);
if_heap_number.Then();
HValue* res_val = Add<HForceRepresentation>(result,
Representation::Double());
HObjectAccess access = HObjectAccess::ForHeapNumberValue();
Add<HStoreNamedField>(reuse, access, res_val);
Push(reuse);
if_heap_number.Else();
Push(result);
if_heap_number.End();
result = Pop();
}
return result;
}
Handle<Code> BinaryOpStub::GenerateCode(Isolate* isolate) {
return DoGenerateCode(isolate, this);
}
template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
ToBooleanStub* stub = casted_stub();
@@ -763,6 +763,9 @@ class HValue : public ZoneObject {
void SetFlag(Flag f) { flags_ |= (1 << f); }
void ClearFlag(Flag f) { flags_ &= ~(1 << f); }
bool CheckFlag(Flag f) const { return (flags_ & (1 << f)) != 0; }
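// Copies flag f from |other| to this value if it is set there.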
void CopyFlag(Flag f, HValue* other) {
if (other->CheckFlag(f)) SetFlag(f);
}
// Returns true if the flag specified is set for all uses, false otherwise.
bool CheckUsesForFlag(Flag f) const;
@@ -7501,7 +7501,7 @@ HInstruction* HOptimizedGraphBuilder::BuildIncrement(
bool returns_original_input,
CountOperation* expr) {
// The input to the count operation is on top of the expression stack.
TypeInfo info = expr->type();
Handle<Type> info = expr->type();
Representation rep = Representation::FromType(info);
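// Fall back to Smi when the type feedback yields no useful representation.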
if (rep.IsNone() || rep.IsTagged()) {
rep = Representation::Smi();
@@ -7865,7 +7865,8 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(
Handle<Type> left_type,
Handle<Type> right_type,
Handle<Type> result_type,
Maybe<int> fixed_right_arg) {
Maybe<int> fixed_right_arg,
bool binop_stub) {
Representation left_rep = Representation::FromType(left_type);
Representation right_rep = Representation::FromType(right_type);
@@ -7894,75 +7895,92 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(
right_rep = Representation::FromType(right_type);
}
if (binop_stub) {
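// In the stub the inputs arrive untyped; enforce the number types recorded
// in the type feedback on them before building the operation.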
left = EnforceNumberType(left, left_type);
right = EnforceNumberType(right, right_type);
}
Representation result_rep = Representation::FromType(result_type);
bool is_string_add = op == Token::ADD &&
(left_type->Is(Type::String()) ||
right_type->Is(Type::String()));
bool is_non_primitive = (left_rep.IsTagged() && !left_rep.IsSmi()) ||
(right_rep.IsTagged() && !right_rep.IsSmi());
bool is_string_add = op == Token::ADD &&
(left_type->Is(Type::String()) ||
right_type->Is(Type::String()));
HInstruction* instr = NULL;
switch (op) {
case Token::ADD:
if (is_string_add) {
StringAddFlags flags = STRING_ADD_CHECK_BOTH;
if (left_type->Is(Type::String())) {
BuildCheckHeapObject(left);
AddInstruction(HCheckInstanceType::NewIsString(left, zone()));
flags = STRING_ADD_CHECK_RIGHT;
}
if (right_type->Is(Type::String())) {
BuildCheckHeapObject(right);
AddInstruction(HCheckInstanceType::NewIsString(right, zone()));
flags = (flags == STRING_ADD_CHECK_BOTH)
? STRING_ADD_CHECK_LEFT : STRING_ADD_CHECK_NONE;
// Only the stub is allowed to call into the runtime, since otherwise we would
// inline several instructions (including the two pushes) for every tagged
// operation in optimized code, which is more expensive than a stub call.
if (binop_stub && is_non_primitive && !is_string_add) {
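// Generic tagged case: load the corresponding JS builtin, push both
// operands and invoke it with two arguments.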
HValue* function = AddLoadJSBuiltin(BinaryOpIC::TokenToJSBuiltin(op));
Add<HPushArgument>(left);
Add<HPushArgument>(right);
instr = NewUncasted<HInvokeFunction>(function, 2);
} else {
switch (op) {
case Token::ADD:
if (is_string_add) {
StringAddFlags flags = STRING_ADD_CHECK_BOTH;
if (left_type->Is(Type::String())) {
BuildCheckHeapObject(left);
AddInstruction(HCheckInstanceType::NewIsString(left, zone()));
flags = STRING_ADD_CHECK_RIGHT;
}
if (right_type->Is(Type::String())) {
BuildCheckHeapObject(right);
AddInstruction(HCheckInstanceType::NewIsString(right, zone()));
flags = (flags == STRING_ADD_CHECK_BOTH)
? STRING_ADD_CHECK_LEFT : STRING_ADD_CHECK_NONE;
}
instr = NewUncasted<HStringAdd>(left, right, flags);
} else {
instr = NewUncasted<HAdd>(left, right);
}
instr = NewUncasted<HStringAdd>(left, right, flags);
} else {
instr = NewUncasted<HAdd>(left, right);
}
break;
case Token::SUB:
instr = NewUncasted<HSub>(left, right);
break;
case Token::MUL:
instr = NewUncasted<HMul>(left, right);
break;
case Token::MOD:
instr = NewUncasted<HMod>(left, right, fixed_right_arg);
break;
case Token::DIV:
instr = NewUncasted<HDiv>(left, right);
break;
case Token::BIT_XOR:
case Token::BIT_AND:
instr = NewUncasted<HBitwise>(op, left, right);
break;
case Token::BIT_OR: {
HValue* operand, *shift_amount;
if (left_type->Is(Type::Signed32()) &&
right_type->Is(Type::Signed32()) &&
MatchRotateRight(left, right, &operand, &shift_amount)) {
instr = NewUncasted<HRor>(operand, shift_amount);
} else {
break;
case Token::SUB:
instr = NewUncasted<HSub>(left, right);
break;
case Token::MUL:
instr = NewUncasted<HMul>(left, right);
break;
case Token::MOD:
instr = NewUncasted<HMod>(left, right, fixed_right_arg);
break;
case Token::DIV:
instr = NewUncasted<HDiv>(left, right);
break;
case Token::BIT_XOR:
case Token::BIT_AND:
instr = NewUncasted<HBitwise>(op, left, right);
break;
case Token::BIT_OR: {
HValue* operand, *shift_amount;
if (left_type->Is(Type::Signed32()) &&
right_type->Is(Type::Signed32()) &&
MatchRotateRight(left, right, &operand, &shift_amount)) {
instr = NewUncasted<HRor>(operand, shift_amount);
} else {
instr = NewUncasted<HBitwise>(op, left, right);
}
break;
}
break;
case Token::SAR:
instr = NewUncasted<HSar>(left, right);
break;
case Token::SHR:
instr = NewUncasted<HShr>(left, right);
if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
CanBeZero(right)) {
graph()->RecordUint32Instruction(instr);
}
break;
case Token::SHL:
instr = NewUncasted<HShl>(left, right);
break;
default:
UNREACHABLE();
}
case Token::SAR:
instr = NewUncasted<HSar>(left, right);
break;
case Token::SHR:
instr = NewUncasted<HShr>(left, right);
if (FLAG_opt_safe_uint32_operations && instr->IsShr() &&
CanBeZero(right)) {
graph()->RecordUint32Instruction(instr);
}
break;
case Token::SHL:
instr = NewUncasted<HShl>(left, right);
break;
default:
UNREACHABLE();
}
if (instr->IsBinaryOperation()) {
@@ -7970,6 +7988,19 @@ HInstruction* HGraphBuilder::BuildBinaryOperation(
binop->set_observed_input_representation(1, left_rep);
binop->set_observed_input_representation(2, right_rep);
binop->initialize_output_representation(result_rep);
if (binop_stub) {
// A stub must not call back into another stub.
instr->SetFlag(HValue::kCannotBeTagged);
// Truncation should already be handled by the HForceRepresentation inputs.
if (left->IsForceRepresentation()) {
left->CopyFlag(HValue::kTruncatingToSmi, instr);
left->CopyFlag(HValue::kTruncatingToInt32, instr);
}
if (right->IsForceRepresentation()) {
right->CopyFlag(HValue::kTruncatingToSmi, instr);
right->CopyFlag(HValue::kTruncatingToInt32, instr);
}
}
}
return instr;
}
@@ -1291,7 +1291,8 @@ class HGraphBuilder {
Handle<Type> left_type,
Handle<Type> right_type,
Handle<Type> result_type,
Maybe<int> fixed_right_arg);
Maybe<int> fixed_right_arg,
bool binop_stub = false);
HLoadNamedField* AddLoadFixedArrayLength(HValue *object);
@@ -212,6 +212,7 @@ void Deoptimizer::SetPlatformCompiledStubRegisters(
void Deoptimizer::CopyDoubleRegisters(FrameDescription* output_frame) {
if (!CpuFeatures::IsSupported(SSE2)) return;
for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
double double_value = input_->GetDoubleRegister(i);
output_frame->SetDoubleRegister(i, double_value);
@@ -253,8 +253,8 @@ void MacroAssembler::X87TOSToI(Register result_reg,
Label::Distance dst) {
Label done;
sub(esp, Immediate(kPointerSize));
fist_s(MemOperand(esp, 0));
fld(0);
fist_s(MemOperand(esp, 0));
fild_s(MemOperand(esp, 0));
pop(result_reg);
FCmp();
@@ -453,6 +453,7 @@ static double kUint32Bias =
void MacroAssembler::LoadUint32(XMMRegister dst,
Register src,
XMMRegister scratch) {
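// Presumably required because the code below references the kUint32Bias
// constant by raw address, which the serializer cannot relocate.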
ASSERT(!Serializer::enabled());
Label done;
cmp(src, Immediate(0));
movdbl(scratch,
@@ -57,8 +57,8 @@ namespace internal {
ICU(LoadPropertyWithInterceptorForCall) \
ICU(KeyedLoadPropertyWithInterceptor) \
ICU(StoreInterceptorProperty) \
ICU(BinaryOp_Patch) \
ICU(CompareIC_Miss) \
ICU(BinaryOpIC_Miss) \
ICU(CompareNilIC_Miss) \
ICU(Unreachable) \
ICU(ToBooleanIC_Miss)
@@ -735,22 +735,14 @@ class BinaryOpIC: public IC {
GENERIC
};
static void StubInfoToType(int minor_key,
Handle<Type>* left,
Handle<Type>* right,
Handle<Type>* result,
Isolate* isolate);
explicit BinaryOpIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
explicit BinaryOpIC(Isolate* isolate) : IC(EXTRA_CALL_FRAME, isolate) { }
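// The miss handler is presumably now reached through the stub's frame,
// hence EXTRA_CALL_FRAME instead of NO_EXTRA_FRAME above.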
void patch(Code* code);
static Builtins::JavaScript TokenToJSBuiltin(Token::Value op);
static const char* GetName(TypeInfo type_info);
static State ToState(TypeInfo type_info);
private:
static Handle<Type> TypeInfoToType(TypeInfo binary_type, Isolate* isolate);
MUST_USE_RESULT MaybeObject* Transition(Handle<Object> left,
Handle<Object> right);
};
@@ -857,6 +849,7 @@ DECLARE_RUNTIME_FUNCTION(MaybeObject*, KeyedStoreIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, UnaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, StoreIC_MissFromStubFailure);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, ElementsTransitionAndStoreIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, BinaryOpIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, CompareNilIC_Miss);
DECLARE_RUNTIME_FUNCTION(MaybeObject*, ToBooleanIC_Miss);
@@ -2338,6 +2338,7 @@ bool Isolate::Init(Deserializer* des) {
DONT_TRACK_ALLOCATION_SITE, 0);
stub.InitializeInterfaceDescriptor(
this, code_stub_interface_descriptor(CodeStub::FastCloneShallowArray));
BinaryOpStub::InitializeForIsolate(this);
CompareNilICStub::InitializeForIsolate(this);
ToBooleanStub::InitializeForIsolate(this);
ArrayConstructorStubBase::InstallDescriptors(this);
@@ -1610,7 +1610,12 @@ void Logger::LogCodeObject(Object* object) {
case Code::FUNCTION:
case Code::OPTIMIZED_FUNCTION:
return; // We log this later using LogCompiledFunctions.
case Code::BINARY_OP_IC: // fall through
case Code::BINARY_OP_IC: {
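// Reconstruct the stub from the code object's extended extra IC state so a
// descriptive name can be logged.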
BinaryOpStub stub(code_object->extended_extra_ic_state());
description = stub.GetName().Detach();
tag = Logger::STUB_TAG;
break;
}
case Code::COMPARE_IC: // fall through
case Code::COMPARE_NIL_IC: // fall through
case Code::TO_BOOLEAN_IC: // fall through
@@ -4883,7 +4883,8 @@ class Code: public HeapObject {
// TODO(danno): This is a bit of a hack right now since there are still
// clients of this API that pass "extra" values in for argc. These clients
// should be retrofitted to use ExtendedExtraICState.
return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC;
return kind == COMPARE_NIL_IC || kind == TO_BOOLEAN_IC ||
kind == BINARY_OP_IC;
}
inline StubType type(); // Only valid for monomorphic IC stubs.
@@ -381,20 +381,29 @@ void TypeFeedbackOracle::BinaryType(TypeFeedbackId id,
Handle<Type>* left,
Handle<Type>* right,
Handle<Type>* result,
Maybe<int>* fixed_right_arg) {
Maybe<int>* fixed_right_arg,
Token::Value operation) {
Handle<Object> object = GetInfo(id);
if (!object->IsCode()) {
// For some binary ops we don't have ICs, e.g. Token::COMMA.
// For some binary ops we don't have ICs, e.g. Token::COMMA, but for the
// operations covered by the BinaryOpStub we should always have them.
ASSERT(!(operation >= BinaryOpStub::FIRST_TOKEN &&
operation <= BinaryOpStub::LAST_TOKEN));
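// No feedback was collected for this operation; report Type::None for all
// positions.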
*left = *right = *result = handle(Type::None(), isolate_);
return;
}
Handle<Code> code = Handle<Code>::cast(object);
ASSERT(code->is_binary_op_stub());
int minor_key = code->stub_info();
BinaryOpIC::StubInfoToType(minor_key, left, right, result, isolate());
*fixed_right_arg =
BinaryOpStub::decode_fixed_right_arg_from_minor_key(minor_key);
BinaryOpStub stub(code->extended_extra_ic_state());
// Sanity check.
ASSERT(stub.operation() == operation);
*left = stub.GetLeftType(isolate());
*right = stub.GetRightType(isolate());
*result = stub.GetResultType(isolate());
*fixed_right_arg = stub.fixed_right_arg();
}
@@ -410,36 +419,16 @@ Handle<Type> TypeFeedbackOracle::ClauseType(TypeFeedbackId id) {
}
TypeInfo TypeFeedbackOracle::IncrementType(CountOperation* expr) {
Handle<Type> TypeFeedbackOracle::IncrementType(CountOperation* expr) {
Handle<Object> object = GetInfo(expr->CountBinOpFeedbackId());
TypeInfo unknown = TypeInfo::Unknown();
Handle<Type> unknown(Type::None(), isolate_);
ASSERT(object->IsCode());
if (!object->IsCode()) return unknown;
Handle<Code> code = Handle<Code>::cast(object);
if (!code->is_binary_op_stub()) return unknown;
BinaryOpIC::TypeInfo left_type, right_type, unused_result_type;
BinaryOpStub::decode_types_from_minor_key(code->stub_info(), &left_type,
&right_type, &unused_result_type);
// CountOperations should always have +1 or -1 as their right input.
ASSERT(right_type == BinaryOpIC::SMI ||
right_type == BinaryOpIC::UNINITIALIZED);
switch (left_type) {
case BinaryOpIC::UNINITIALIZED:
case BinaryOpIC::SMI:
return TypeInfo::Smi();
case BinaryOpIC::INT32:
return TypeInfo::Integer32();
case BinaryOpIC::NUMBER:
return TypeInfo::Double();
case BinaryOpIC::STRING:
case BinaryOpIC::GENERIC:
return unknown;
default:
return unknown;
}
UNREACHABLE();
return unknown;
BinaryOpStub stub(code->extended_extra_ic_state());
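// The counted value corresponds to the stub's left operand (the right one
// is always +1 or -1), so its recorded type is the increment type.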
return stub.GetLeftType(isolate());
}
@@ -301,7 +301,8 @@ class TypeFeedbackOracle: public ZoneObject {
Handle<Type>* left,
Handle<Type>* right,
Handle<Type>* result,
Maybe<int>* fixed_right_arg);
Maybe<int>* fixed_right_arg,
Token::Value operation);
void CompareType(TypeFeedbackId id,
Handle<Type>* left,
@@ -310,7 +311,7 @@ class TypeFeedbackOracle: public ZoneObject {
Handle<Type> ClauseType(TypeFeedbackId id);
TypeInfo IncrementType(CountOperation* expr);
Handle<Type> IncrementType(CountOperation* expr);
Zone* zone() const { return zone_; }
Isolate* isolate() const { return isolate_; }
@@ -543,7 +543,7 @@ void AstTyper::VisitBinaryOperation(BinaryOperation* expr) {
Handle<Type> type, left_type, right_type;
Maybe<int> fixed_right_arg;
oracle()->BinaryType(expr->BinaryOperationFeedbackId(),
&left_type, &right_type, &type, &fixed_right_arg);
&left_type, &right_type, &type, &fixed_right_arg, expr->op());
NarrowLowerType(expr, type);
NarrowLowerType(expr->left(), left_type);
NarrowLowerType(expr->right(), right_type);
@@ -204,7 +204,6 @@ namespace internal {
SC(enum_cache_hits, V8.EnumCacheHits) \
SC(enum_cache_misses, V8.EnumCacheMisses) \
SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
SC(generic_binary_stub_calls, V8.GenericBinaryStubCalls) \
SC(fast_new_closure_total, V8.FastNewClosureTotal) \
SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \