Commit a638717d authored by titzer@chromium.org's avatar titzer@chromium.org

Add support for untagged LoadField, StoreField, LoadElement, and StoreElement...

Add support for untagged LoadField, StoreField, LoadElement, and StoreElement simplified operators. This is needed for untagged accesses of typed array backing stores and generally cleans up.

R=bmeurer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/439223004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@22815 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 0bd7c29d
......@@ -193,9 +193,11 @@ void SimplifiedLowering::DoChangeBitToBool(Node* node, Node* effect,
static WriteBarrierKind ComputeWriteBarrierKind(
MachineRepresentation representation, Type* type) {
BaseTaggedness base_is_tagged, MachineRepresentation representation,
Type* type) {
// TODO(turbofan): skip write barriers for Smis, etc.
if (representation == kMachineTagged) {
if (base_is_tagged == kTaggedBase && representation == kMachineTagged) {
// Write barriers are only for writes into heap objects (i.e. tagged base).
return kFullWriteBarrier;
}
return kNoWriteBarrier;
......@@ -205,19 +207,17 @@ static WriteBarrierKind ComputeWriteBarrierKind(
void SimplifiedLowering::DoLoadField(Node* node, Node* effect, Node* control) {
const FieldAccess& access = FieldAccessOf(node->op());
node->set_op(machine_.Load(access.representation));
Node* offset =
graph()->NewNode(common()->Int32Constant(access.offset - kHeapObjectTag));
Node* offset = jsgraph()->Int32Constant(access.offset - access.tag());
node->InsertInput(zone(), 1, offset);
}
void SimplifiedLowering::DoStoreField(Node* node, Node* effect, Node* control) {
const FieldAccess& access = FieldAccessOf(node->op());
WriteBarrierKind kind =
ComputeWriteBarrierKind(access.representation, access.type);
WriteBarrierKind kind = ComputeWriteBarrierKind(
access.base_is_tagged, access.representation, access.type);
node->set_op(machine_.Store(access.representation, kind));
Node* offset =
graph()->NewNode(common()->Int32Constant(access.offset - kHeapObjectTag));
Node* offset = jsgraph()->Int32Constant(access.offset - access.tag());
node->InsertInput(zone(), 1, offset);
}
......@@ -247,15 +247,13 @@ Node* SimplifiedLowering::ComputeIndex(const ElementAccess& access,
break;
}
if (element_size != 1) {
index = graph()->NewNode(
machine()->Int32Mul(),
graph()->NewNode(common()->Int32Constant(element_size)), index);
index = graph()->NewNode(machine()->Int32Mul(),
jsgraph()->Int32Constant(element_size), index);
}
int fixed_offset = access.header_size - kHeapObjectTag;
int fixed_offset = access.header_size - access.tag();
if (fixed_offset == 0) return index;
return graph()->NewNode(
machine()->Int32Add(),
graph()->NewNode(common()->Int32Constant(fixed_offset)), index);
return graph()->NewNode(machine()->Int32Add(),
jsgraph()->Int32Constant(fixed_offset), index);
}
......@@ -270,8 +268,8 @@ void SimplifiedLowering::DoLoadElement(Node* node, Node* effect,
void SimplifiedLowering::DoStoreElement(Node* node, Node* effect,
Node* control) {
const ElementAccess& access = ElementAccessOf(node->op());
WriteBarrierKind kind =
ComputeWriteBarrierKind(access.representation, access.type);
WriteBarrierKind kind = ComputeWriteBarrierKind(
access.base_is_tagged, access.representation, access.type);
node->set_op(machine_.Store(access.representation, kind));
node->ReplaceInput(1, ComputeIndex(access, node->InputAt(1)));
}
......
......@@ -13,22 +13,33 @@ namespace v8 {
namespace internal {
namespace compiler {
// An access descriptor for loads/stores from/to fixed structures
// like field accesses of heap objects.
enum BaseTaggedness { kUntaggedBase, kTaggedBase };
// An access descriptor for loads/stores of fixed structures like field
// accesses of heap objects. Accesses from either tagged or untagged base
// pointers are supported; untagging is done automatically during lowering.
struct FieldAccess {
int offset;
Handle<Name> name; // debug only.
Type* type;
MachineRepresentation representation;
BaseTaggedness base_is_tagged; // specifies if the base pointer is tagged.
int offset; // offset of the field, without tag.
Handle<Name> name; // debugging only.
Type* type; // type of the field.
MachineRepresentation representation; // machine representation of field.
int tag() const { return base_is_tagged == kTaggedBase ? kHeapObjectTag : 0; }
};
// An access descriptor for loads/stores of indexed structures
// like characters in strings or off-heap backing stores.
// An access descriptor for loads/stores of indexed structures like characters
// in strings or off-heap backing stores. Accesses from either tagged or
// untagged base pointers are supported; untagging is done automatically during
// lowering.
struct ElementAccess {
int header_size;
Type* type;
MachineRepresentation representation;
BaseTaggedness base_is_tagged; // specifies if the base pointer is tagged.
int header_size; // size of the header, without tag.
Type* type; // type of the element.
MachineRepresentation representation; // machine representation of element.
int tag() const { return base_is_tagged == kTaggedBase ? kHeapObjectTag : 0; }
};
......@@ -46,8 +57,8 @@ struct StaticParameterTraits<const FieldAccess> {
return (val.offset < 16) | (val.representation & 0xffff);
}
static bool Equals(const FieldAccess& a, const FieldAccess& b) {
return a.offset == b.offset && a.representation == b.representation &&
a.type->Is(b.type);
return a.base_is_tagged == b.base_is_tagged && a.offset == b.offset &&
a.representation == b.representation && a.type->Is(b.type);
}
};
......@@ -62,7 +73,8 @@ struct StaticParameterTraits<const ElementAccess> {
return (val.header_size < 16) | (val.representation & 0xffff);
}
static bool Equals(const ElementAccess& a, const ElementAccess& b) {
return a.header_size == b.header_size &&
return a.base_is_tagged == b.base_is_tagged &&
a.header_size == b.header_size &&
a.representation == b.representation && a.type->Is(b.type);
}
};
......
......@@ -86,22 +86,23 @@ class SimplifiedGraphBuilderTester : public GraphBuilderTester<ReturnType> {
namespace {
FieldAccess ForJSObjectMap() {
FieldAccess access = {JSObject::kMapOffset, Handle<Name>(), Type::Any(),
kMachineTagged};
FieldAccess access = {kTaggedBase, JSObject::kMapOffset, Handle<Name>(),
Type::Any(), kMachineTagged};
return access;
}
FieldAccess ForJSObjectProperties() {
FieldAccess access = {JSObject::kPropertiesOffset, Handle<Name>(),
Type::Any(), kMachineTagged};
FieldAccess access = {kTaggedBase, JSObject::kPropertiesOffset,
Handle<Name>(), Type::Any(), kMachineTagged};
return access;
}
FieldAccess ForArrayBufferBackingStore() {
FieldAccess access = {
JSArrayBuffer::kBackingStoreOffset, Handle<Name>(), Type::UntaggedPtr(),
kTaggedBase, JSArrayBuffer::kBackingStoreOffset,
Handle<Name>(), Type::UntaggedPtr(),
MachineOperatorBuilder::pointer_rep(),
};
return access;
......@@ -109,13 +110,16 @@ FieldAccess ForArrayBufferBackingStore() {
ElementAccess ForFixedArrayElement() {
ElementAccess access = {FixedArray::kHeaderSize, Type::Any(), kMachineTagged};
ElementAccess access = {kTaggedBase, FixedArray::kHeaderSize, Type::Any(),
kMachineTagged};
return access;
}
ElementAccess ForBackingStoreElement(MachineRepresentation rep) {
ElementAccess access = {kNonHeapObjectHeaderSize, Type::Any(), rep};
ElementAccess access = {kUntaggedBase,
kNonHeapObjectHeaderSize - kHeapObjectTag,
Type::Any(), rep};
return access;
}
}
......@@ -311,3 +315,102 @@ TEST(RunCopyFixedArray) {
CHECK_EQ(src_copy->get(i), dst->get(i));
}
}
// Tests that lowering a LoadField with an untagged base pointer reads straight
// from the raw machine address, i.e. no kHeapObjectTag untagging is applied.
TEST(RunLoadFieldFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3)};

  for (size_t i = 0; i < ARRAY_SIZE(smis); i++) {
    // Explicit cast: braced aggregate initialization rejects the implicit
    // size_t -> int narrowing of the computed byte offset.
    FieldAccess access = {kUntaggedBase,                       // untagged base
                          static_cast<int>(i * sizeof(Smi*)),  // offset
                          Handle<Name>(), Type::Integral32(), kMachineTagged};

    SimplifiedGraphBuilderTester<Object*> t;
    Node* load = t.LoadField(access, t.PointerConstant(smis));
    t.Return(load);
    t.LowerAllNodes();

    // Mutate the backing store between calls to verify the lowered load really
    // reads through the untagged pointer on every invocation.
    for (int j = -5; j <= 5; j++) {
      Smi* expected = Smi::FromInt(j);
      smis[i] = expected;
      CHECK_EQ(expected, t.Call());
    }
  }
}
// Tests that lowering a StoreField with an untagged base pointer writes
// straight to the raw machine address (no kHeapObjectTag untagging).
TEST(RunStoreFieldToUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3)};

  for (size_t i = 0; i < ARRAY_SIZE(smis); i++) {
    // Explicit cast: braced aggregate initialization rejects the implicit
    // size_t -> int narrowing of the computed byte offset.
    FieldAccess access = {kUntaggedBase,                       // untagged base
                          static_cast<int>(i * sizeof(Smi*)),  // offset
                          Handle<Name>(), Type::Integral32(), kMachineTagged};

    SimplifiedGraphBuilderTester<Object*> t(kMachineTagged);
    Node* p0 = t.Parameter(0);
    t.StoreField(access, t.PointerConstant(smis), p0);
    t.Return(p0);
    t.LowerAllNodes();

    // Poison the slot before each call, then check both the returned value
    // and that the store landed in the untagged backing array.
    for (int j = -5; j <= 5; j++) {
      Smi* expected = Smi::FromInt(j);
      smis[i] = Smi::FromInt(-100);
      CHECK_EQ(expected, t.Call(expected));
      CHECK_EQ(expected, smis[i]);
    }
  }
}
// Tests that lowering a LoadElement with an untagged base pointer computes
// the element address from the raw machine pointer (header + index * size)
// without any kHeapObjectTag adjustment.
TEST(RunLoadElementFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3),
                 Smi::FromInt(4), Smi::FromInt(5)};

  for (size_t i = 0; i < ARRAY_SIZE(smis); i++) {        // for header sizes
    // Bound the index by (i + j) so that the accessed slot smis[i + j] stays
    // inside the array; starting j at i and bounding j alone would index past
    // the end (e.g. i = 4, j = 4 -> smis[8] in a 5-element array).
    for (size_t j = 0; (i + j) < ARRAY_SIZE(smis); j++) {  // for element index
      // Explicit cast: braced aggregate init rejects size_t -> int narrowing.
      ElementAccess access = {kUntaggedBase,                       // untagged base
                              static_cast<int>(i * sizeof(Smi*)),  // header size
                              Type::Integral32(), kMachineTagged};

      SimplifiedGraphBuilderTester<Object*> t;
      Node* load = t.LoadElement(access, t.PointerConstant(smis),
                                 t.Int32Constant(static_cast<int>(j)));
      t.Return(load);
      t.LowerAllNodes();

      // Mutate the slot between calls to verify the lowered load reads
      // through the untagged pointer on every invocation.
      for (int k = -5; k <= 5; k++) {
        Smi* expected = Smi::FromInt(k);
        smis[i + j] = expected;
        CHECK_EQ(expected, t.Call());
      }
    }
  }
}
// Tests that lowering a StoreElement with an untagged base pointer writes to
// the raw machine address computed as header + index * element size.
// NOTE(review): the name reads "FromUntaggedBase" although the store goes TO
// the untagged base (cf. RunStoreFieldToUntaggedBase) — consider renaming.
TEST(RunStoreElementFromUntaggedBase) {
  Smi* smis[] = {Smi::FromInt(1), Smi::FromInt(2), Smi::FromInt(3),
                 Smi::FromInt(4), Smi::FromInt(5)};

  for (size_t i = 0; i < ARRAY_SIZE(smis); i++) {        // for header sizes
    // Bound the index by (i + j) so that the accessed slot smis[i + j] stays
    // inside the array; starting j at i and bounding j alone would index past
    // the end (e.g. i = 4, j = 4 -> smis[8] in a 5-element array).
    for (size_t j = 0; (i + j) < ARRAY_SIZE(smis); j++) {  // for element index
      // Explicit cast: braced aggregate init rejects size_t -> int narrowing.
      ElementAccess access = {kUntaggedBase,                       // untagged base
                              static_cast<int>(i * sizeof(Smi*)),  // header size
                              Type::Integral32(), kMachineTagged};

      SimplifiedGraphBuilderTester<Object*> t(kMachineTagged);
      Node* p0 = t.Parameter(0);
      t.StoreElement(access, t.PointerConstant(smis),
                     t.Int32Constant(static_cast<int>(j)), p0);
      t.Return(p0);
      t.LowerAllNodes();

      // Poison the slot before each call, then check both the returned value
      // and that the store landed in the untagged backing array.
      for (int k = -5; k <= 5; k++) {
        Smi* expected = Smi::FromInt(k);
        smis[i + j] = Smi::FromInt(-100);
        CHECK_EQ(expected, t.Call(expected));
        CHECK_EQ(expected, smis[i + j]);
      }
    }
  }
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment