Remove obsolete support for an experimental multipass compiler.

Review URL: http://codereview.chromium.org/255022

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2996 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 87fbbd5d
@@ -36,49 +36,48 @@ Import('context')
SOURCES = {
'all': [
'accessors.cc', 'allocation.cc', 'api.cc', 'assembler.cc', 'ast.cc',
'bootstrapper.cc', 'builtins.cc', 'checks.cc', 'cfg.cc',
'code-stubs.cc', 'codegen.cc', 'compilation-cache.cc', 'compiler.cc',
'contexts.cc', 'conversions.cc', 'counters.cc', 'dateparser.cc',
'debug.cc', 'debug-agent.cc', 'disassembler.cc', 'execution.cc',
'factory.cc', 'flags.cc', 'frame-element.cc', 'frames.cc',
'func-name-inferrer.cc', 'global-handles.cc', 'handles.cc',
'hashmap.cc', 'heap.cc', 'heap-profiler.cc', 'ic.cc',
'interpreter-irregexp.cc', 'jsregexp.cc', 'jump-target.cc',
'log.cc', 'log-utils.cc', 'mark-compact.cc', 'messages.cc',
'objects.cc', 'oprofile-agent.cc', 'parser.cc', 'property.cc',
'regexp-macro-assembler.cc', 'regexp-macro-assembler-irregexp.cc',
'regexp-stack.cc', 'register-allocator.cc', 'rewriter.cc',
'runtime.cc', 'scanner.cc', 'scopeinfo.cc', 'scopes.cc',
'serialize.cc', 'snapshot-common.cc', 'spaces.cc',
'string-stream.cc', 'stub-cache.cc', 'token.cc', 'top.cc',
'bootstrapper.cc', 'builtins.cc', 'checks.cc', 'code-stubs.cc',
'codegen.cc', 'compilation-cache.cc', 'compiler.cc', 'contexts.cc',
'conversions.cc', 'counters.cc', 'dateparser.cc', 'debug.cc',
'debug-agent.cc', 'disassembler.cc', 'execution.cc', 'factory.cc',
'flags.cc', 'frame-element.cc', 'frames.cc', 'func-name-inferrer.cc',
'global-handles.cc', 'handles.cc', 'hashmap.cc', 'heap.cc',
'heap-profiler.cc', 'ic.cc', 'interpreter-irregexp.cc', 'jsregexp.cc',
'jump-target.cc', 'log.cc', 'log-utils.cc', 'mark-compact.cc',
'messages.cc', 'objects.cc', 'oprofile-agent.cc', 'parser.cc',
'property.cc', 'regexp-macro-assembler.cc',
'regexp-macro-assembler-irregexp.cc', 'regexp-stack.cc',
'register-allocator.cc', 'rewriter.cc', 'runtime.cc', 'scanner.cc',
'scopeinfo.cc', 'scopes.cc', 'serialize.cc', 'snapshot-common.cc',
'spaces.cc', 'string-stream.cc', 'stub-cache.cc', 'token.cc', 'top.cc',
'unicode.cc', 'usage-analyzer.cc', 'utils.cc', 'v8-counters.cc',
'v8.cc', 'v8threads.cc', 'variables.cc', 'version.cc',
'virtual-frame.cc', 'zone.cc'
],
'arch:arm': [
'arm/assembler-arm.cc', 'arm/builtins-arm.cc', 'arm/cfg-arm.cc',
'arm/codegen-arm.cc', 'arm/constants-arm.cc', 'arm/cpu-arm.cc',
'arm/disasm-arm.cc', 'arm/debug-arm.cc', 'arm/frames-arm.cc',
'arm/ic-arm.cc', 'arm/jump-target-arm.cc', 'arm/macro-assembler-arm.cc',
'arm/regexp-macro-assembler-arm.cc',
'arm/register-allocator-arm.cc', 'arm/stub-cache-arm.cc',
'arm/virtual-frame-arm.cc'
'arm/assembler-arm.cc', 'arm/builtins-arm.cc', 'arm/codegen-arm.cc',
'arm/constants-arm.cc', 'arm/cpu-arm.cc', 'arm/disasm-arm.cc',
'arm/debug-arm.cc', 'arm/frames-arm.cc', 'arm/ic-arm.cc',
'arm/jump-target-arm.cc', 'arm/macro-assembler-arm.cc',
'arm/regexp-macro-assembler-arm.cc', 'arm/register-allocator-arm.cc',
'arm/stub-cache-arm.cc', 'arm/virtual-frame-arm.cc'
],
'arch:ia32': [
'ia32/assembler-ia32.cc', 'ia32/builtins-ia32.cc', 'ia32/cfg-ia32.cc',
'ia32/assembler-ia32.cc', 'ia32/builtins-ia32.cc',
'ia32/codegen-ia32.cc', 'ia32/cpu-ia32.cc', 'ia32/disasm-ia32.cc',
'ia32/debug-ia32.cc', 'ia32/frames-ia32.cc', 'ia32/ic-ia32.cc',
'ia32/jump-target-ia32.cc', 'ia32/macro-assembler-ia32.cc',
'ia32/regexp-macro-assembler-ia32.cc', 'ia32/register-allocator-ia32.cc',
'ia32/stub-cache-ia32.cc', 'ia32/virtual-frame-ia32.cc'
'ia32/regexp-macro-assembler-ia32.cc',
'ia32/register-allocator-ia32.cc', 'ia32/stub-cache-ia32.cc',
'ia32/virtual-frame-ia32.cc'
],
'arch:x64': [
'x64/assembler-x64.cc', 'x64/builtins-x64.cc', 'x64/cfg-x64.cc',
'x64/codegen-x64.cc', 'x64/cpu-x64.cc', 'x64/disasm-x64.cc',
'x64/debug-x64.cc', 'x64/frames-x64.cc', 'x64/ic-x64.cc',
'x64/jump-target-x64.cc', 'x64/macro-assembler-x64.cc',
'x64/regexp-macro-assembler-x64.cc', 'x64/register-allocator-x64.cc',
'x64/stub-cache-x64.cc', 'x64/virtual-frame-x64.cc'
'x64/assembler-x64.cc', 'x64/builtins-x64.cc', 'x64/codegen-x64.cc',
'x64/cpu-x64.cc', 'x64/disasm-x64.cc', 'x64/debug-x64.cc',
'x64/frames-x64.cc', 'x64/ic-x64.cc', 'x64/jump-target-x64.cc',
'x64/macro-assembler-x64.cc', 'x64/regexp-macro-assembler-x64.cc',
'x64/register-allocator-x64.cc', 'x64/stub-cache-x64.cc',
'x64/virtual-frame-x64.cc'
],
'simulator:arm': ['arm/simulator-arm.cc'],
'os:freebsd': ['platform-freebsd.cc', 'platform-posix.cc'],
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "codegen-arm.h" // Include after codegen-inl.h.
#include "macro-assembler-arm.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmt(masm, "[ InstructionBlock");
for (int i = 0, len = instructions_.length(); i < len; i++) {
// If the location of the current instruction is a temp, then the
// instruction cannot be in tail position in the block. Allocate the
// temp based on peeking ahead to the next instruction.
Instruction* instr = instructions_[i];
Location* loc = instr->location();
if (loc->is_temporary()) {
instructions_[i+1]->FastAllocate(TempLocation::cast(loc));
}
instructions_[i]->Compile(masm);
}
}
successor_->Compile(masm);
}
void EntryNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmnt(masm, "[ EntryNode");
__ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
__ add(fp, sp, Operand(2 * kPointerSize));
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < count; i++) {
__ push(ip);
}
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
if (FLAG_check_stack) {
StackCheckStub stub;
__ CallStub(&stub);
}
}
successor_->Compile(masm);
}
void ExitNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Comment cmnt(masm, "[ ExitNode");
if (FLAG_trace) {
__ push(r0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ add(sp, sp, Operand((count + 1) * kPointerSize));
__ Jump(lr);
}
void PropLoadInstr::Compile(MacroAssembler* masm) {
// The key should not be on the stack---if it is a compiler-generated
// temporary it is in the accumulator.
ASSERT(!key()->is_on_stack());
Comment cmnt(masm, "[ Load from Property");
// If the key is known at compile-time we may be able to use a load IC.
bool is_keyed_load = true;
if (key()->is_constant()) {
// Still use the keyed load IC if the key can be parsed as an integer so
// we will get into the case that handles [] on string objects.
Handle<Object> key_val = Constant::cast(key())->handle();
uint32_t ignored;
if (key_val->IsSymbol() &&
!String::cast(*key_val)->AsArrayIndex(&ignored)) {
is_keyed_load = false;
}
}
if (!object()->is_on_stack()) object()->Push(masm);
if (is_keyed_load) {
key()->Push(masm);
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
// Discard key and receiver.
__ add(sp, sp, Operand(2 * kPointerSize));
} else {
key()->Get(masm, r2);
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
__ pop(); // Discard receiver.
}
location()->Set(masm, r0);
}
void BinaryOpInstr::Compile(MacroAssembler* masm) {
// The right-hand value should not be on the stack---if it is a
// compiler-generated temporary it is in the accumulator.
ASSERT(!right()->is_on_stack());
Comment cmnt(masm, "[ BinaryOpInstr");
// We can overwrite one of the operands if it is a temporary.
OverwriteMode mode = NO_OVERWRITE;
if (left()->is_temporary()) {
mode = OVERWRITE_LEFT;
} else if (right()->is_temporary()) {
mode = OVERWRITE_RIGHT;
}
// Move left to r1 and right to r0.
left()->Get(masm, r1);
right()->Get(masm, r0);
GenericBinaryOpStub stub(op(), mode);
__ CallStub(&stub);
location()->Set(masm, r0);
}
void ReturnInstr::Compile(MacroAssembler* masm) {
// The location should be 'Effect'. As a side effect, move the value to
// the accumulator.
Comment cmnt(masm, "[ ReturnInstr");
value()->Get(masm, r0);
}
void Constant::Get(MacroAssembler* masm, Register reg) {
__ mov(reg, Operand(handle_));
}
void Constant::Push(MacroAssembler* masm) {
__ mov(ip, Operand(handle_));
__ push(ip);
}
static MemOperand ToMemOperand(SlotLocation* loc) {
switch (loc->type()) {
case Slot::PARAMETER: {
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
return MemOperand(fp, (1 + count - loc->index()) * kPointerSize);
}
case Slot::LOCAL: {
const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
return MemOperand(fp, kOffset - loc->index() * kPointerSize);
}
default:
UNREACHABLE();
return MemOperand(r0);
}
}
void Constant::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
__ mov(ip, Operand(handle_));
__ str(ip, ToMemOperand(loc));
}
void SlotLocation::Get(MacroAssembler* masm, Register reg) {
__ ldr(reg, ToMemOperand(this));
}
void SlotLocation::Set(MacroAssembler* masm, Register reg) {
__ str(reg, ToMemOperand(this));
}
void SlotLocation::Push(MacroAssembler* masm) {
__ ldr(ip, ToMemOperand(this));
__ push(ip); // Push will not destroy ip.
}
void SlotLocation::Move(MacroAssembler* masm, Value* value) {
// Double dispatch.
value->MoveToSlot(masm, this);
}
void SlotLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
__ ldr(ip, ToMemOperand(this));
__ str(ip, ToMemOperand(loc));
}
void TempLocation::Get(MacroAssembler* masm, Register reg) {
switch (where_) {
case ACCUMULATOR:
if (!reg.is(r0)) __ mov(reg, r0);
break;
case STACK:
__ pop(reg);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Set(MacroAssembler* masm, Register reg) {
switch (where_) {
case ACCUMULATOR:
if (!reg.is(r0)) __ mov(r0, reg);
break;
case STACK:
__ push(reg);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Push(MacroAssembler* masm) {
switch (where_) {
case ACCUMULATOR:
__ push(r0);
break;
case STACK:
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Move(MacroAssembler* masm, Value* value) {
switch (where_) {
case ACCUMULATOR:
value->Get(masm, r0);
break;
case STACK:
value->Push(masm);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
switch (where_) {
case ACCUMULATOR:
__ str(r0, ToMemOperand(loc));
break;
case STACK:
__ pop(ip);
__ str(ip, ToMemOperand(loc));
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
#undef __
} } // namespace v8::internal
@@ -28,7 +28,6 @@
#include "v8.h"
#include "bootstrapper.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "compilation-cache.h"
#include "compiler.h"
@@ -79,22 +78,6 @@ static Handle<Code> MakeCode(FunctionLiteral* literal,
return Handle<Code>::null();
}
if (FLAG_multipass) {
CfgGlobals scope(literal);
Cfg* cfg = Cfg::Build();
#ifdef DEBUG
if (FLAG_print_cfg && cfg != NULL) {
SmartPointer<char> name = literal->name()->ToCString();
PrintF("Function \"%s\":\n", *name);
cfg->Print();
PrintF("\n");
}
#endif
if (cfg != NULL) {
return cfg->Compile(script);
}
}
// Generate code and return it.
Handle<Code> result = CodeGenerator::MakeCode(literal, script, is_eval);
return result;
@@ -133,7 +133,6 @@ DEFINE_bool(debug_info, true, "add debug information to compiled functions")
DEFINE_bool(strict, false, "strict error checking")
DEFINE_int(min_preparse_length, 1024,
"Minimum length for automatic enable preparsing")
DEFINE_bool(multipass, false, "use the multipass code generator")
// compilation-cache.cc
DEFINE_bool(compilation_cache, true, "enable compilation cache")
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "codegen-ia32.h"
#include "macro-assembler-ia32.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmt(masm, "[ InstructionBlock");
for (int i = 0, len = instructions_.length(); i < len; i++) {
// If the location of the current instruction is a temp, then the
// instruction cannot be in tail position in the block. Allocate the
// temp based on peeking ahead to the next instruction.
Instruction* instr = instructions_[i];
Location* loc = instr->location();
if (loc->is_temporary()) {
instructions_[i+1]->FastAllocate(TempLocation::cast(loc));
}
instructions_[i]->Compile(masm);
}
}
successor_->Compile(masm);
}
void EntryNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Label deferred_enter, deferred_exit;
{
Comment cmnt(masm, "[ EntryNode");
__ push(ebp);
__ mov(ebp, esp);
__ push(esi);
__ push(edi);
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ Set(eax, Immediate(Factory::undefined_value()));
for (int i = 0; i < count; i++) {
__ push(eax);
}
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
if (FLAG_check_stack) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_guard_limit();
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(below, &deferred_enter);
__ bind(&deferred_exit);
}
}
successor_->Compile(masm);
if (FLAG_check_stack) {
Comment cmnt(masm, "[ Deferred Stack Check");
__ bind(&deferred_enter);
StackCheckStub stub;
__ CallStub(&stub);
__ jmp(&deferred_exit);
}
}
void ExitNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Comment cmnt(masm, "[ ExitNode");
if (FLAG_trace) {
__ push(eax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ RecordJSReturn();
__ mov(esp, ebp);
__ pop(ebp);
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ ret((count + 1) * kPointerSize);
}
void PropLoadInstr::Compile(MacroAssembler* masm) {
// The key should not be on the stack---if it is a compiler-generated
// temporary it is in the accumulator.
ASSERT(!key()->is_on_stack());
Comment cmnt(masm, "[ Load from Property");
// If the key is known at compile-time we may be able to use a load IC.
bool is_keyed_load = true;
if (key()->is_constant()) {
// Still use the keyed load IC if the key can be parsed as an integer so
// we will get into the case that handles [] on string objects.
Handle<Object> key_val = Constant::cast(key())->handle();
uint32_t ignored;
if (key_val->IsSymbol() &&
!String::cast(*key_val)->AsArrayIndex(&ignored)) {
is_keyed_load = false;
}
}
if (!object()->is_on_stack()) object()->Push(masm);
// A test eax instruction after the call indicates to the IC code that it
// was inlined. Ensure there is not one here.
if (is_keyed_load) {
key()->Push(masm);
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET);
__ pop(ebx); // Discard key.
} else {
key()->Get(masm, ecx);
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET);
}
__ pop(ebx); // Discard receiver.
location()->Set(masm, eax);
}
void BinaryOpInstr::Compile(MacroAssembler* masm) {
// The right-hand value should not be on the stack---if it is a
// compiler-generated temporary it is in the accumulator.
ASSERT(!right()->is_on_stack());
Comment cmnt(masm, "[ BinaryOpInstr");
// We can overwrite one of the operands if it is a temporary.
OverwriteMode mode = NO_OVERWRITE;
if (left()->is_temporary()) {
mode = OVERWRITE_LEFT;
} else if (right()->is_temporary()) {
mode = OVERWRITE_RIGHT;
}
// Push both operands and call the specialized stub.
if (!left()->is_on_stack()) left()->Push(masm);
right()->Push(masm);
GenericBinaryOpStub stub(op(), mode, SMI_CODE_IN_STUB);
__ CallStub(&stub);
location()->Set(masm, eax);
}
void ReturnInstr::Compile(MacroAssembler* masm) {
// The location should be 'Effect'. As a side effect, move the value to
// the accumulator.
Comment cmnt(masm, "[ ReturnInstr");
value_->Get(masm, eax);
}
void Constant::Get(MacroAssembler* masm, Register reg) {
__ mov(reg, Immediate(handle_));
}
void Constant::Push(MacroAssembler* masm) {
__ push(Immediate(handle_));
}
static Operand ToOperand(SlotLocation* loc) {
switch (loc->type()) {
case Slot::PARAMETER: {
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
return Operand(ebp, (1 + count - loc->index()) * kPointerSize);
}
case Slot::LOCAL: {
const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
return Operand(ebp, kOffset - loc->index() * kPointerSize);
}
default:
UNREACHABLE();
return Operand(eax);
}
}
void Constant::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
__ mov(ToOperand(loc), Immediate(handle_));
}
void SlotLocation::Get(MacroAssembler* masm, Register reg) {
__ mov(reg, ToOperand(this));
}
void SlotLocation::Set(MacroAssembler* masm, Register reg) {
__ mov(ToOperand(this), reg);
}
void SlotLocation::Push(MacroAssembler* masm) {
__ push(ToOperand(this));
}
void SlotLocation::Move(MacroAssembler* masm, Value* value) {
// We dispatch to the value because in some cases (temp or constant)
// we can use a single instruction.
value->MoveToSlot(masm, this);
}
void SlotLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
// The accumulator is not live across a MoveInstr.
__ mov(eax, ToOperand(this));
__ mov(ToOperand(loc), eax);
}
void TempLocation::Get(MacroAssembler* masm, Register reg) {
switch (where_) {
case ACCUMULATOR:
if (!reg.is(eax)) __ mov(reg, eax);
break;
case STACK:
__ pop(reg);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Set(MacroAssembler* masm, Register reg) {
switch (where_) {
case ACCUMULATOR:
if (!reg.is(eax)) __ mov(eax, reg);
break;
case STACK:
__ push(reg);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Push(MacroAssembler* masm) {
switch (where_) {
case ACCUMULATOR:
__ push(eax);
break;
case STACK:
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Move(MacroAssembler* masm, Value* value) {
switch (where_) {
case ACCUMULATOR:
value->Get(masm, eax);
break;
case STACK:
value->Push(masm);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
switch (where_) {
case ACCUMULATOR:
__ mov(ToOperand(loc), eax);
break;
case STACK:
__ pop(ToOperand(loc));
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
#undef __
} } // namespace v8::internal
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "codegen-x64.h"
#include "debug.h"
#include "macro-assembler-x64.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmt(masm, "[ InstructionBlock");
for (int i = 0, len = instructions_.length(); i < len; i++) {
// If the location of the current instruction is a temp, then the
// instruction cannot be in tail position in the block. Allocate the
// temp based on peeking ahead to the next instruction.
Instruction* instr = instructions_[i];
Location* loc = instr->location();
if (loc->is_temporary()) {
instructions_[i+1]->FastAllocate(TempLocation::cast(loc));
}
instructions_[i]->Compile(masm);
}
}
successor_->Compile(masm);
}
void EntryNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Label deferred_enter, deferred_exit;
{
Comment cmnt(masm, "[ EntryNode");
__ push(rbp);
__ movq(rbp, rsp);
__ push(rsi);
__ push(rdi);
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
for (int i = 0; i < count; i++) {
__ push(kScratchRegister);
}
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
if (FLAG_check_stack) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_guard_limit();
__ movq(kScratchRegister, stack_limit);
__ cmpq(rsp, Operand(kScratchRegister, 0));
__ j(below, &deferred_enter);
__ bind(&deferred_exit);
}
}
successor_->Compile(masm);
if (FLAG_check_stack) {
Comment cmnt(masm, "[ Deferred Stack Check");
__ bind(&deferred_enter);
StackCheckStub stub;
__ CallStub(&stub);
__ jmp(&deferred_exit);
}
}
void ExitNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Comment cmnt(masm, "[ ExitNode");
if (FLAG_trace) {
__ push(rax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ RecordJSReturn();
__ movq(rsp, rbp);
__ pop(rbp);
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ ret((count + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint.
// "movq rsp, rbp; pop rbp" has length 4. "ret k" has length 3.
const int kPadding = Debug::kX64JSReturnSequenceLength - 4 - 3;
for (int i = 0; i < kPadding; ++i) {
__ int3();
}
#endif
}
void PropLoadInstr::Compile(MacroAssembler* masm) {
// The key should not be on the stack---if it is a compiler-generated
// temporary it is in the accumulator.
ASSERT(!key()->is_on_stack());
Comment cmnt(masm, "[ Load from Property");
// If the key is known at compile-time we may be able to use a load IC.
bool is_keyed_load = true;
if (key()->is_constant()) {
// Still use the keyed load IC if the key can be parsed as an integer so
// we will get into the case that handles [] on string objects.
Handle<Object> key_val = Constant::cast(key())->handle();
uint32_t ignored;
if (key_val->IsSymbol() &&
!String::cast(*key_val)->AsArrayIndex(&ignored)) {
is_keyed_load = false;
}
}
if (!object()->is_on_stack()) object()->Push(masm);
// A test rax instruction after the call indicates to the IC code that it
// was inlined. Ensure there is not one after the call below.
if (is_keyed_load) {
key()->Push(masm);
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
__ pop(rbx); // Discard key.
} else {
key()->Get(masm, rcx);
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
__ Call(ic, RelocInfo::CODE_TARGET);
}
__ pop(rbx); // Discard receiver.
location()->Set(masm, rax);
}
void BinaryOpInstr::Compile(MacroAssembler* masm) {
// The right-hand value should not be on the stack---if it is a
// compiler-generated temporary it is in the accumulator.
ASSERT(!right()->is_on_stack());
Comment cmnt(masm, "[ BinaryOpInstr");
// We can overwrite one of the operands if it is a temporary.
OverwriteMode mode = NO_OVERWRITE;
if (left()->is_temporary()) {
mode = OVERWRITE_LEFT;
} else if (right()->is_temporary()) {
mode = OVERWRITE_RIGHT;
}
// Push both operands and call the specialized stub.
if (!left()->is_on_stack()) left()->Push(masm);
right()->Push(masm);
GenericBinaryOpStub stub(op(), mode, SMI_CODE_IN_STUB);
__ CallStub(&stub);
location()->Set(masm, rax);
}
void ReturnInstr::Compile(MacroAssembler* masm) {
// The location should be 'Effect'. As a side effect, move the value to
// the accumulator.
Comment cmnt(masm, "[ ReturnInstr");
value()->Get(masm, rax);
}
void Constant::Get(MacroAssembler* masm, Register reg) {
__ Move(reg, handle_);
}
void Constant::Push(MacroAssembler* masm) {
__ Push(handle_);
}
static Operand ToOperand(SlotLocation* loc) {
switch (loc->type()) {
case Slot::PARAMETER: {
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
return Operand(rbp, (1 + count - loc->index()) * kPointerSize);
}
case Slot::LOCAL: {
const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
return Operand(rbp, kOffset - loc->index() * kPointerSize);
}
default:
UNREACHABLE();
return Operand(rax, 0);
}
}
void Constant::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
__ Move(ToOperand(loc), handle_);
}
void SlotLocation::Get(MacroAssembler* masm, Register reg) {
__ movq(reg, ToOperand(this));
}
void SlotLocation::Set(MacroAssembler* masm, Register reg) {
__ movq(ToOperand(this), reg);
}
void SlotLocation::Push(MacroAssembler* masm) {
__ push(ToOperand(this));
}
void SlotLocation::Move(MacroAssembler* masm, Value* value) {
// We dispatch to the value because in some cases (temp or constant) we
// can use special instruction sequences.
value->MoveToSlot(masm, this);
}
void SlotLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
__ movq(kScratchRegister, ToOperand(this));
__ movq(ToOperand(loc), kScratchRegister);
}
void TempLocation::Get(MacroAssembler* masm, Register reg) {
switch (where_) {
case ACCUMULATOR:
if (!reg.is(rax)) __ movq(reg, rax);
break;
case STACK:
__ pop(reg);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Set(MacroAssembler* masm, Register reg) {
switch (where_) {
case ACCUMULATOR:
if (!reg.is(rax)) __ movq(rax, reg);
break;
case STACK:
__ push(reg);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Push(MacroAssembler* masm) {
switch (where_) {
case ACCUMULATOR:
__ push(rax);
break;
case STACK:
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::Move(MacroAssembler* masm, Value* value) {
switch (where_) {
case ACCUMULATOR:
value->Get(masm, rax);
break;
case STACK:
value->Push(masm);
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
void TempLocation::MoveToSlot(MacroAssembler* masm, SlotLocation* loc) {
switch (where_) {
case ACCUMULATOR:
__ movq(ToOperand(loc), rax);
break;
case STACK:
__ pop(ToOperand(loc));
break;
case NOT_ALLOCATED:
UNREACHABLE();
}
}
#undef __
} } // namespace v8::internal
@@ -219,8 +219,6 @@
'../../src/builtins.cc',
'../../src/builtins.h',
'../../src/bytecodes-irregexp.h',
'../../src/cfg.cc',
'../../src/cfg.h',
'../../src/char-predicates-inl.h',
'../../src/char-predicates.h',
'../../src/checks.cc',
@@ -390,7 +388,6 @@
'../../src/arm/assembler-arm.cc',
'../../src/arm/assembler-arm.h',
'../../src/arm/builtins-arm.cc',
'../../src/arm/cfg-arm.cc',
'../../src/arm/codegen-arm.cc',
'../../src/arm/codegen-arm.h',
'../../src/arm/constants-arm.h',
@@ -421,7 +418,6 @@
'../../src/ia32/assembler-ia32.cc',
'../../src/ia32/assembler-ia32.h',
'../../src/ia32/builtins-ia32.cc',
'../../src/ia32/cfg-ia32.cc',
'../../src/ia32/codegen-ia32.cc',
'../../src/ia32/codegen-ia32.h',
'../../src/ia32/cpu-ia32.cc',
@@ -450,7 +446,6 @@
'../../src/x64/assembler-x64.cc',
'../../src/x64/assembler-x64.h',
'../../src/x64/builtins-x64.cc',
'../../src/x64/cfg-x64.cc',
'../../src/x64/codegen-x64.cc',
'../../src/x64/codegen-x64.h',
'../../src/x64/cpu-x64.cc',
@@ -236,18 +236,6 @@
RelativePath="..\..\src\bytecodes-irregexp.h"
>
</File>
<File
RelativePath="..\..\src\ia32\cfg-ia32.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.h"
>
</File>
<File
RelativePath="..\..\src\char-predicates-inl.h"
>
@@ -236,18 +236,6 @@
RelativePath="..\..\src\bytecodes-irregexp.h"
>
</File>
<File
RelativePath="..\..\src\arm\cfg-arm.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.h"
>
</File>
<File
RelativePath="..\..\src\char-predicates-inl.h"
>
@@ -236,18 +236,6 @@
RelativePath="..\..\src\bytecodes-irregexp.h"
>
</File>
<File
RelativePath="..\..\src\x64\cfg-x64.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.h"
>
</File>
<File
RelativePath="..\..\src\char-predicates-inl.h"
>