Commit 6ca58764 authored by kasperl@chromium.org's avatar kasperl@chromium.org

Add fast case stub for BIT_NOT.

Review URL: http://codereview.chromium.org/503079

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@3513 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent af6971d8
...@@ -3698,7 +3698,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { ...@@ -3698,7 +3698,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
bool overwrite = bool overwrite =
(node->expression()->AsBinaryOperation() != NULL && (node->expression()->AsBinaryOperation() != NULL &&
node->expression()->AsBinaryOperation()->ResultOverwriteAllowed()); node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
UnarySubStub stub(overwrite); GenericUnaryOpStub stub(Token::SUB, overwrite);
frame_->CallStub(&stub, 0); frame_->CallStub(&stub, 0);
break; break;
} }
...@@ -5940,7 +5940,9 @@ void StackCheckStub::Generate(MacroAssembler* masm) { ...@@ -5940,7 +5940,9 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
} }
void UnarySubStub::Generate(MacroAssembler* masm) { void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
ASSERT(op_ == Token::SUB);
Label undo; Label undo;
Label slow; Label slow;
Label not_smi; Label not_smi;
......
...@@ -46,7 +46,7 @@ namespace internal { ...@@ -46,7 +46,7 @@ namespace internal {
V(FastNewClosure) \ V(FastNewClosure) \
V(FastNewContext) \ V(FastNewContext) \
V(FastCloneShallowArray) \ V(FastCloneShallowArray) \
V(UnarySub) \ V(GenericUnaryOp) \
V(RevertToNumber) \ V(RevertToNumber) \
V(ToBoolean) \ V(ToBoolean) \
V(Instanceof) \ V(Instanceof) \
......
...@@ -448,6 +448,23 @@ const char* RuntimeStub::GetName() { ...@@ -448,6 +448,23 @@ const char* RuntimeStub::GetName() {
} }
// Returns a static, human-readable name for this stub.  The name encodes
// both the unary operation and whether the stub is allowed to overwrite
// its operand (versus allocating a fresh result).
const char* GenericUnaryOpStub::GetName() {
  if (op_ == Token::SUB) {
    if (overwrite_) return "GenericUnaryOpStub_SUB_Overwrite";
    return "GenericUnaryOpStub_SUB_Alloc";
  }
  if (op_ == Token::BIT_NOT) {
    if (overwrite_) return "GenericUnaryOpStub_BIT_NOT_Overwrite";
    return "GenericUnaryOpStub_BIT_NOT_Alloc";
  }
  // Only SUB and BIT_NOT are supported by this stub.
  UNREACHABLE();
  return "<unknown>";
}
void RuntimeStub::Generate(MacroAssembler* masm) { void RuntimeStub::Generate(MacroAssembler* masm) {
Runtime::Function* f = Runtime::FunctionForId(id_); Runtime::Function* f = Runtime::FunctionForId(id_);
masm->TailCallRuntime(ExternalReference(f), masm->TailCallRuntime(ExternalReference(f),
......
...@@ -294,20 +294,26 @@ class InstanceofStub: public CodeStub { ...@@ -294,20 +294,26 @@ class InstanceofStub: public CodeStub {
}; };
class UnarySubStub : public CodeStub { class GenericUnaryOpStub : public CodeStub {
public: public:
explicit UnarySubStub(bool overwrite) GenericUnaryOpStub(Token::Value op, bool overwrite)
: overwrite_(overwrite) { } : op_(op), overwrite_(overwrite) { }
private: private:
Token::Value op_;
bool overwrite_; bool overwrite_;
Major MajorKey() { return UnarySub; }
int MinorKey() { return overwrite_ ? 1 : 0; }
void Generate(MacroAssembler* masm);
const char* GetName() { class OverwriteField: public BitField<int, 0, 1> {};
return overwrite_ ? "UnarySubStub_Overwrite" : "UnarySubStub_Alloc"; class OpField: public BitField<Token::Value, 1, kMinorBits - 1> {};
Major MajorKey() { return GenericUnaryOp; }
int MinorKey() {
return OpField::encode(op_) | OverwriteField::encode(overwrite_);
} }
void Generate(MacroAssembler* masm);
const char* GetName();
}; };
......
...@@ -5515,12 +5515,12 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { ...@@ -5515,12 +5515,12 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
} else { } else {
Load(node->expression()); Load(node->expression());
bool overwrite =
(node->expression()->AsBinaryOperation() != NULL &&
node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
switch (op) { switch (op) {
case Token::SUB: { case Token::SUB: {
bool overwrite = GenericUnaryOpStub stub(Token::SUB, overwrite);
(node->expression()->AsBinaryOperation() != NULL &&
node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
UnarySubStub stub(overwrite);
// TODO(1222589): remove dependency of TOS being cached inside stub // TODO(1222589): remove dependency of TOS being cached inside stub
Result operand = frame_->Pop(); Result operand = frame_->Pop();
Result answer = frame_->CallStub(&stub, &operand); Result answer = frame_->CallStub(&stub, &operand);
...@@ -5537,16 +5537,16 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { ...@@ -5537,16 +5537,16 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
__ test(operand.reg(), Immediate(kSmiTagMask)); __ test(operand.reg(), Immediate(kSmiTagMask));
smi_label.Branch(zero, &operand, taken); smi_label.Branch(zero, &operand, taken);
frame_->Push(&operand); // undo popping of TOS GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
Result answer = frame_->InvokeBuiltin(Builtins::BIT_NOT, Result answer = frame_->CallStub(&stub, &operand);
CALL_FUNCTION, 1);
continue_label.Jump(&answer); continue_label.Jump(&answer);
smi_label.Bind(&answer); smi_label.Bind(&answer);
answer.ToRegister(); answer.ToRegister();
frame_->Spill(answer.reg()); frame_->Spill(answer.reg());
__ not_(answer.reg()); __ not_(answer.reg());
__ and_(answer.reg(), ~kSmiTagMask); // Remove inverted smi-tag. __ and_(answer.reg(), ~kSmiTagMask); // Remove inverted smi-tag.
continue_label.Bind(&answer); continue_label.Bind(&answer);
frame_->Push(&answer); frame_->Push(&answer);
break; break;
...@@ -7282,9 +7282,15 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { ...@@ -7282,9 +7282,15 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
default: UNREACHABLE(); default: UNREACHABLE();
} }
// Store the result in the HeapNumber and return. // Store the result in the HeapNumber and return.
__ mov(Operand(esp, 1 * kPointerSize), ebx); if (CpuFeatures::IsSupported(SSE2)) {
__ fild_s(Operand(esp, 1 * kPointerSize)); CpuFeatures::Scope use_sse2(SSE2);
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset)); __ cvtsi2sd(xmm0, Operand(ebx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
} else {
__ mov(Operand(esp, 1 * kPointerSize), ebx);
__ fild_s(Operand(esp, 1 * kPointerSize));
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
}
GenerateReturn(masm); GenerateReturn(masm);
} }
...@@ -7711,67 +7717,119 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm, ...@@ -7711,67 +7717,119 @@ void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
} }
void UnarySubStub::Generate(MacroAssembler* masm) { void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
Label undo; Label slow, done;
Label slow;
Label done;
Label try_float;
// Check whether the value is a smi.
__ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &try_float, not_taken);
// Enter runtime system if the value of the expression is zero if (op_ == Token::SUB) {
// to make sure that we switch between 0 and -0. // Check whether the value is a smi.
__ test(eax, Operand(eax)); Label try_float;
__ j(zero, &slow, not_taken); __ test(eax, Immediate(kSmiTagMask));
__ j(not_zero, &try_float, not_taken);
// The value of the expression is a smi that is not zero. Try // Go slow case if the value of the expression is zero
// optimistic subtraction '0 - value'. // to make sure that we switch between 0 and -0.
__ mov(edx, Operand(eax)); __ test(eax, Operand(eax));
__ Set(eax, Immediate(0)); __ j(zero, &slow, not_taken);
__ sub(eax, Operand(edx));
__ j(overflow, &undo, not_taken);
// If result is a smi we are done. // The value of the expression is a smi that is not zero. Try
__ test(eax, Immediate(kSmiTagMask)); // optimistic subtraction '0 - value'.
__ j(zero, &done, taken); Label undo;
__ mov(edx, Operand(eax));
// Restore eax and enter runtime system. __ Set(eax, Immediate(0));
__ bind(&undo); __ sub(eax, Operand(edx));
__ mov(eax, Operand(edx)); __ j(overflow, &undo, not_taken);
// Enter runtime system. // If result is a smi we are done.
__ bind(&slow); __ test(eax, Immediate(kSmiTagMask));
__ pop(ecx); // pop return address __ j(zero, &done, taken);
__ push(eax);
__ push(ecx); // push return address // Restore eax and go slow case.
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); __ bind(&undo);
__ mov(eax, Operand(edx));
__ jmp(&slow);
// Try floating point case.
__ bind(&try_float);
__ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
__ cmp(edx, Factory::heap_number_map());
__ j(not_equal, &slow);
if (overwrite_) {
__ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
__ xor_(edx, HeapNumber::kSignMask); // Flip sign.
__ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx);
} else {
__ mov(edx, Operand(eax));
// edx: operand
__ AllocateHeapNumber(eax, ebx, ecx, &undo);
// eax: allocated 'empty' number
__ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
__ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
__ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
__ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
__ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
}
} else if (op_ == Token::BIT_NOT) {
// Check if the operand is a heap number.
__ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
__ cmp(edx, Factory::heap_number_map());
__ j(not_equal, &slow, not_taken);
// Convert the heap number in eax to an untagged integer in ecx.
IntegerConvert(masm, eax, CpuFeatures::IsSupported(SSE3), &slow);
// Do the bitwise operation and check if the result fits in a smi.
Label try_float;
__ not_(ecx);
__ cmp(ecx, 0xc0000000);
__ j(sign, &try_float, not_taken);
// Tag the result as a smi and we're done.
ASSERT(kSmiTagSize == 1);
__ lea(eax, Operand(ecx, times_2, kSmiTag));
__ jmp(&done);
// Try floating point case. // Try to store the result in a heap number.
__ bind(&try_float); __ bind(&try_float);
__ mov(edx, FieldOperand(eax, HeapObject::kMapOffset)); if (!overwrite_) {
__ cmp(edx, Factory::heap_number_map()); // Allocate a fresh heap number, but don't overwrite eax until
__ j(not_equal, &slow); // we're sure we can do it without going through the slow case
if (overwrite_) { // that needs the value in eax.
__ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset)); __ AllocateHeapNumber(ebx, edx, edi, &slow);
__ xor_(edx, HeapNumber::kSignMask); // Flip sign. __ mov(eax, Operand(ebx));
__ mov(FieldOperand(eax, HeapNumber::kExponentOffset), edx); }
if (CpuFeatures::IsSupported(SSE2)) {
CpuFeatures::Scope use_sse2(SSE2);
__ cvtsi2sd(xmm0, Operand(ecx));
__ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
} else {
__ push(ecx);
__ fild_s(Operand(esp, 0));
__ pop(ecx);
__ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
}
} else { } else {
__ mov(edx, Operand(eax)); UNIMPLEMENTED();
// edx: operand
__ AllocateHeapNumber(eax, ebx, ecx, &undo);
// eax: allocated 'empty' number
__ mov(ecx, FieldOperand(edx, HeapNumber::kExponentOffset));
__ xor_(ecx, HeapNumber::kSignMask); // Flip sign.
__ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ecx);
__ mov(ecx, FieldOperand(edx, HeapNumber::kMantissaOffset));
__ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
} }
// Return from the stub.
__ bind(&done); __ bind(&done);
__ StubReturn(1); __ StubReturn(1);
// Handle the slow case by jumping to the JavaScript builtin.
__ bind(&slow);
__ pop(ecx); // pop return address.
__ push(eax);
__ push(ecx); // push return address
switch (op_) {
case Token::SUB:
__ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
break;
case Token::BIT_NOT:
__ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
break;
default:
UNREACHABLE();
}
} }
......
...@@ -3109,7 +3109,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) { ...@@ -3109,7 +3109,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
bool overwrite = bool overwrite =
(node->expression()->AsBinaryOperation() != NULL && (node->expression()->AsBinaryOperation() != NULL &&
node->expression()->AsBinaryOperation()->ResultOverwriteAllowed()); node->expression()->AsBinaryOperation()->ResultOverwriteAllowed());
UnarySubStub stub(overwrite); GenericUnaryOpStub stub(Token::SUB, overwrite);
// TODO(1222589): remove dependency of TOS being cached inside stub // TODO(1222589): remove dependency of TOS being cached inside stub
Result operand = frame_->Pop(); Result operand = frame_->Pop();
Result answer = frame_->CallStub(&stub, &operand); Result answer = frame_->CallStub(&stub, &operand);
...@@ -6272,7 +6272,9 @@ bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) { ...@@ -6272,7 +6272,9 @@ bool CodeGenerator::FoldConstantSmis(Token::Value op, int left, int right) {
// End of CodeGenerator implementation. // End of CodeGenerator implementation.
void UnarySubStub::Generate(MacroAssembler* masm) { void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
ASSERT(op_ == Token::SUB);
Label slow; Label slow;
Label done; Label done;
Label try_float; Label try_float;
......
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Check that ~x evaluated at runtime matches the value the VM produces
// by constant folding a literal expression (built via eval).
function testBitNot(x) {
  var folded = eval("~(" + x + ")");
  var computed = ~x;
  assertEquals(folded, computed, "x: " + x);
  // Also exercise the path where the stub may overwrite its operand.
  // Subtraction (rather than +) keeps string inputs from turning the
  // expression into string concatenation.
  folded = eval("~(" + x + " - 0.01)");
  computed = ~(x - 0.01);
  assertEquals(folded, computed, "x - 0.01: " + x);
}
// Cover smis, heap numbers around the 30/31/32-bit boundaries, non-finite
// values, and strings that coerce to numbers.  Order matches the original
// call sequence.
[0, 1, -1, 100,
 0x40000000, 0x7fffffff, 0x80000000,
 2.2, -2.3,
 Infinity, NaN, -Infinity,
 0x40000000 + 0.12345, 0x40000000 - 0.12345,
 0x7fffffff + 0.12345, 0x7fffffff - 0.12345,
 0x80000000 + 0.12345, 0x80000000 - 0.12345,
 "0", "2.3", "-9.4"].forEach(testBitNot);
// Stress the fast path enough that heap-number allocation inside the
// stub is likely to fail at least once, so the slow (builtin) fallback
// gets exercised as well.
function TryToGC() {
  var value = 0x40000000;
  var iterations = 1000000;
  for (var i = 0; i < iterations; i++) {
    assertEquals(~0x40000000, ~value);
  }
}
TryToGC();
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment