Commit 68868c73 authored by mstarzinger, committed by Commit bot

[compiler] Remove compiler internals from CodeAssembler.

This removes some compiler internals as well as some JavaScript-specific
helpers from the CodeAssembler, either by hiding them or by moving the
support into the CodeStubAssembler.

R=bmeurer@chromium.org

Review-Url: https://codereview.chromium.org/2246463002
Cr-Commit-Position: refs/heads/master@{#38617}
parent cc758711
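
For context, a minimal standalone sketch of the tagging arithmetic encoded by the SmiShiftBitsConstant() helper this change relocates. This is not V8 code; it assumes the 64-bit Smi layout of the era (kSmiTagSize == 1, kSmiShiftSize == 31), under which a tagged small integer occupies the upper 32 bits of a word:

    #include <cstdint>
    #include <iostream>

    // Assumed 64-bit Smi layout: 1 tag bit plus 31 shift bits.
    constexpr int kSmiTagSize = 1;
    constexpr int kSmiShiftSize = 31;

    // Mirrors what CodeStubAssembler::SmiFromWord32 emits: shift left by
    // SmiShiftBitsConstant() == kSmiShiftSize + kSmiTagSize.
    intptr_t SmiTag(int32_t value) {
      return static_cast<intptr_t>(value) << (kSmiShiftSize + kSmiTagSize);
    }

    int32_t SmiUntag(intptr_t tagged) {
      return static_cast<int32_t>(tagged >> (kSmiShiftSize + kSmiTagSize));
    }

    int main() {
      intptr_t tagged = SmiTag(42);
      std::cout << std::hex << tagged << " -> " << std::dec
                << SmiUntag(tagged) << "\n";  // prints "2a00000000 -> 42"
    }
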
@@ -92,7 +92,7 @@ void Builtins::Generate_CopyFastSmiOrObjectElements(
   assembler->Bind(&if_oldspace);
   {
     Node* target = assembler->AllocateFixedArray(
-        kind, length, mode, compiler::CodeAssembler::kPretenured);
+        kind, length, mode, CodeStubAssembler::kPretenured);
     assembler->CopyFixedArrayElements(kind, source, target, length,
                                       UPDATE_WRITE_BARRIER, mode);
     assembler->StoreObjectField(object, JSObject::kElementsOffset, target);
...
@@ -278,6 +278,10 @@ Node* CodeStubAssembler::Float64Trunc(Node* x) {
   return var_x.value();
 }
 
+Node* CodeStubAssembler::SmiShiftBitsConstant() {
+  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
+}
+
 Node* CodeStubAssembler::SmiFromWord32(Node* value) {
   value = ChangeInt32ToIntPtr(value);
   return WordShl(value, SmiShiftBitsConstant());
@@ -4000,7 +4004,7 @@ Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
                                                         Node* slot,
                                                         Node* value) {
   Node* size = IntPtrConstant(WeakCell::kSize);
-  Node* cell = Allocate(size, compiler::CodeAssembler::kPretenured);
+  Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
 
   // Initialize the WeakCell.
   StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex);
...
@@ -38,6 +38,14 @@ class CodeStubAssembler : public compiler::CodeAssembler {
   CodeStubAssembler(Isolate* isolate, Zone* zone, int parameter_count,
                     Code::Flags flags, const char* name);
 
+  enum AllocationFlag : uint8_t {
+    kNone = 0,
+    kDoubleAlignment = 1,
+    kPretenured = 1 << 1
+  };
+
+  typedef base::Flags<AllocationFlag> AllocationFlags;
+
   enum ParameterMode { INTEGER_PARAMETERS, SMI_PARAMETERS };
 
   compiler::Node* BooleanMapConstant();
@@ -589,9 +597,13 @@ class CodeStubAssembler : public compiler::CodeAssembler {
                                compiler::Node* top_adddress,
                                compiler::Node* limit_address);
 
+  compiler::Node* SmiShiftBitsConstant();
+
   static const int kElementLoopUnrollThreshold = 8;
 };
 
+DEFINE_OPERATORS_FOR_FLAGS(CodeStubAssembler::AllocationFlags);
+
 }  // namespace internal
 }  // namespace v8
 
 #endif  // V8_CODE_STUB_ASSEMBLER_H_
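
Aside: the AllocationFlag enum and the DEFINE_OPERATORS_FOR_FLAGS line moved into this header rely on the base::Flags pattern. A minimal stand-in sketch (hypothetical, not the real base::Flags template) of what that pattern buys, namely type-safe bitwise combination of enum values instead of decay to plain int:

    #include <cstdint>

    enum AllocationFlag : uint8_t {
      kNone = 0,
      kDoubleAlignment = 1,
      kPretenured = 1 << 1,
    };

    // Stand-in for base::Flags<AllocationFlag>: stores a bit mask but only
    // accepts values of the enum type.
    class AllocationFlags {
     public:
      constexpr AllocationFlags(AllocationFlag flag) : bits_(flag) {}
      constexpr bool Contains(AllocationFlag flag) const {
        return (bits_ & flag) != 0;
      }
      // What DEFINE_OPERATORS_FOR_FLAGS generates for the real type.
      friend constexpr AllocationFlags operator|(AllocationFlags lhs,
                                                 AllocationFlags rhs) {
        return AllocationFlags(static_cast<uint8_t>(lhs.bits_ | rhs.bits_));
      }

     private:
      constexpr explicit AllocationFlags(uint8_t bits) : bits_(bits) {}
      uint8_t bits_;
    };

    int main() {
      AllocationFlags flags = AllocationFlags(kDoubleAlignment) | kPretenured;
      return flags.Contains(kPretenured) ? 0 : 1;  // exits 0
    }
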
@@ -5567,7 +5567,7 @@ void CreateAllocationSiteStub::GenerateAssembly(
     CodeStubAssembler* assembler) const {
   typedef compiler::Node Node;
   Node* size = assembler->IntPtrConstant(AllocationSite::kSize);
-  Node* site = assembler->Allocate(size, compiler::CodeAssembler::kPretenured);
+  Node* site = assembler->Allocate(size, CodeStubAssembler::kPretenured);
 
   // Store the map
   assembler->StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
...
@@ -69,8 +69,8 @@ Handle<Code> CodeAssembler::GenerateCode() {
   Schedule* schedule = raw_assembler_->Export();
   Handle<Code> code = Pipeline::GenerateCodeForCodeStub(
-      isolate(), raw_assembler_->call_descriptor(), graph(), schedule, flags_,
-      name_);
+      isolate(), raw_assembler_->call_descriptor(), raw_assembler_->graph(),
+      schedule, flags_, name_);
   code_generated_ = true;
   return code;
@@ -198,10 +198,6 @@ Node* CodeAssembler::LoadStackPointer() {
   return raw_assembler_->LoadStackPointer();
 }
 
-Node* CodeAssembler::SmiShiftBitsConstant() {
-  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
-}
-
 #define DEFINE_CODE_ASSEMBLER_BINARY_OP(name)   \
   Node* CodeAssembler::name(Node* a, Node* b) { \
     return raw_assembler_->name(a, b);          \
@@ -909,8 +905,6 @@ Isolate* CodeAssembler::isolate() const { return raw_assembler_->isolate(); }
 
 Factory* CodeAssembler::factory() const { return isolate()->factory(); }
 
-Graph* CodeAssembler::graph() const { return raw_assembler_->graph(); }
-
 Zone* CodeAssembler::zone() const { return raw_assembler_->zone(); }
 
 // The core implementation of Variable is stored through an indirection so
...
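
Aside: the DEFINE_CODE_ASSEMBLER_BINARY_OP macro visible in the context above stamps out one-line wrappers that forward each operation to the underlying RawMachineAssembler. A minimal sketch of that forwarding-macro pattern, using hypothetical stand-in classes rather than the real V8 types:

    #include <iostream>

    // Stand-in for RawMachineAssembler: owns the real implementations.
    struct RawAssembler {
      int Int32Add(int a, int b) { return a + b; }
      int Int32Sub(int a, int b) { return a - b; }
    };

    // Stand-in for CodeAssembler: forwards via a macro instead of
    // hand-writing every one-line wrapper.
    class Assembler {
     public:
    #define DEFINE_BINARY_OP(name) \
      int name(int a, int b) { return raw_.name(a, b); }
      DEFINE_BINARY_OP(Int32Add)
      DEFINE_BINARY_OP(Int32Sub)
    #undef DEFINE_BINARY_OP

     private:
      RawAssembler raw_;
    };

    int main() {
      Assembler a;
      std::cout << a.Int32Add(2, 3) << " " << a.Int32Sub(5, 1) << "\n";  // 5 4
    }
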
@@ -29,12 +29,9 @@ class Zone;
 namespace compiler {
 
 class CallDescriptor;
-class Graph;
 class Node;
-class Operator;
 class RawMachineAssembler;
 class RawMachineLabel;
-class Schedule;
 
 #define CODE_ASSEMBLER_COMPARE_BINARY_OP_LIST(V) \
   V(Float32Equal)                                \
@@ -207,14 +204,6 @@ class CodeAssembler {
     CodeAssembler* assembler_;
   };
 
-  enum AllocationFlag : uint8_t {
-    kNone = 0,
-    kDoubleAlignment = 1,
-    kPretenured = 1 << 1
-  };
-
-  typedef base::Flags<AllocationFlag> AllocationFlags;
-
   // ===========================================================================
   // Base Assembler
   // ===========================================================================
@@ -433,11 +422,6 @@ class CodeAssembler {
   Zone* zone() const;
 
  protected:
-  // Protected helpers which delegate to RawMachineAssembler.
-  Graph* graph() const;
-
-  Node* SmiShiftBitsConstant();
-
   // Enables subclasses to perform operations before and after a call.
   virtual void CallPrologue();
   virtual void CallEpilogue();
@@ -458,8 +442,6 @@ class CodeAssembler {
   DISALLOW_COPY_AND_ASSIGN(CodeAssembler);
 };
 
-DEFINE_OPERATORS_FOR_FLAGS(CodeAssembler::AllocationFlags);
-
 class CodeAssembler::Label {
  public:
  enum Type { kDeferred, kNonDeferred };
...
@@ -51,12 +51,6 @@ class CodeAssemblerTesterImpl : private ZoneHolder, public CodeAssemblerT {
     return scope_.CloseAndEscape(CodeAssemblerT::GenerateCode());
   }
 
-  // Expose some internal methods.
-  Node* SmiShiftBitsConstant() {
-    return CodeAssemblerT::SmiShiftBitsConstant();
-  }
-
  private:
   HandleScope scope_;
   LocalContext context_;
...
@@ -21,7 +21,7 @@ Node* SmiTag(CodeAssemblerTester& m, Node* value) {
       Smi::IsValid(constant_value)) {
     return m.SmiConstant(Smi::FromInt(constant_value));
   }
-  return m.WordShl(value, m.SmiShiftBitsConstant());
+  return m.WordShl(value, m.IntPtrConstant(kSmiShiftSize + kSmiTagSize));
 }
 
 Node* UndefinedConstant(CodeAssemblerTester& m) {
...
@@ -52,8 +52,6 @@ class InterpreterAssemblerTest : public TestWithIsolateAndZone {
   Matcher<compiler::Node*> IsUnsignedOperand(int offset,
                                              OperandSize operand_size);
 
-  using InterpreterAssembler::graph;
-
  private:
   DISALLOW_COPY_AND_ASSIGN(InterpreterAssemblerForTest);
 };
...