Commit 6cb995b9 authored by jkummerow, committed by Commit bot

Move delete-last-fast-property code from CSA to C++

When deleting the most recently added fast property from an object
by undoing its last map transition, we must clear any recorded slots.
This can only be done in C++, so this functionality must move out
of the stub.
Also update a CHECK in the JSObject verifier to allow backing stores
to stick around after such property deletions.
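
A rough JS sketch of the pattern this fast path covers (illustration only,
not part of this change; the object and property names are made up):

  var o = {};
  o.x = 1;     // map transition: {} -> {x}
  o.y = 2;     // map transition: {x} -> {x, y}
  delete o.y;  // roll back to the {x} map; the field that held y is zapped
               // and any recorded slot for it must be cleared, which requires
               // the C++ runtime.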

BUG=chromium:716912,chromium:714981

Review-Url: https://codereview.chromium.org/2854373002
Cr-Commit-Position: refs/heads/master@{#45069}
parent 1e95840b
@@ -169,84 +169,6 @@ class DeletePropertyBaseAssembler : public CodeStubAssembler {
  explicit DeletePropertyBaseAssembler(compiler::CodeAssemblerState* state)
      : CodeStubAssembler(state) {}

  void DeleteFastProperty(Node* receiver, Node* receiver_map, Node* properties,
                          Node* name, Label* dont_delete, Label* not_found,
                          Label* slow) {
    // This builtin implements a special case for fast property deletion:
    // when the last property in an object is deleted, then instead of
    // normalizing the properties, we can undo the last map transition,
    // with a few prerequisites:
    // (1) The current map must not be marked stable. Otherwise there could
    // be optimized code that depends on the assumption that no object that
    // reached this map transitions away from it (without triggering the
    // "deoptimize dependent code" mechanism).
    Node* bitfield3 = LoadMapBitField3(receiver_map);
    GotoIfNot(IsSetWord32<Map::IsUnstable>(bitfield3), slow);
    // (2) The property to be deleted must be the last property.
    Node* descriptors = LoadMapDescriptors(receiver_map);
    Node* nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
    GotoIf(Word32Equal(nof, Int32Constant(0)), not_found);
    Node* descriptor_number = Int32Sub(nof, Int32Constant(1));
    Node* key_index = DescriptorArrayToKeyIndex(descriptor_number);
    Node* actual_key = LoadFixedArrayElement(descriptors, key_index);
    // TODO(jkummerow): We could implement full descriptor search in order
    // to avoid the runtime call for deleting nonexistent properties, but
    // that's probably a rare case.
    GotoIf(WordNotEqual(actual_key, name), slow);
    // (3) The property to be deleted must be deletable.
    Node* details =
        LoadDetailsByKeyIndex<DescriptorArray>(descriptors, key_index);
    GotoIf(IsSetWord32(details, PropertyDetails::kAttributesDontDeleteMask),
           dont_delete);
    // (4) The map must have a back pointer.
    Node* backpointer =
        LoadObjectField(receiver_map, Map::kConstructorOrBackPointerOffset);
    GotoIfNot(IsMap(backpointer), slow);
    // (5) The last transition must have been caused by adding a property
    // (and not any kind of special transition).
    Node* previous_nof = DecodeWord32<Map::NumberOfOwnDescriptorsBits>(
        LoadMapBitField3(backpointer));
    GotoIfNot(Word32Equal(previous_nof, descriptor_number), slow);
    // Preconditions successful, perform the map rollback!
    // Zap the property to avoid keeping objects alive.
    // Zapping is not necessary for properties stored in the descriptor array.
    Label zapping_done(this);
    GotoIf(Word32NotEqual(DecodeWord32<PropertyDetails::LocationField>(details),
                          Int32Constant(kField)),
           &zapping_done);
    Node* field_index =
        DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
    Node* inobject_properties = LoadMapInobjectProperties(receiver_map);
    Label inobject(this), backing_store(this);
    // Due to inobject slack tracking, a field currently within the object
    // could later be between objects. Use the one pointer filler map for
    // zapping the deleted field to make this safe.
    Node* filler = LoadRoot(Heap::kOnePointerFillerMapRootIndex);
    DCHECK(Heap::RootIsImmortalImmovable(Heap::kOnePointerFillerMapRootIndex));
    Branch(UintPtrLessThan(field_index, inobject_properties), &inobject,
           &backing_store);
    BIND(&inobject);
    {
      Node* field_offset =
          IntPtrMul(IntPtrSub(LoadMapInstanceSize(receiver_map),
                              IntPtrSub(inobject_properties, field_index)),
                    IntPtrConstant(kPointerSize));
      StoreObjectFieldNoWriteBarrier(receiver, field_offset, filler);
      Goto(&zapping_done);
    }
    BIND(&backing_store);
    {
      Node* backing_store_index = IntPtrSub(field_index, inobject_properties);
      StoreFixedArrayElement(properties, backing_store_index, filler,
                             SKIP_WRITE_BARRIER);
      Goto(&zapping_done);
    }
    BIND(&zapping_done);
    StoreMap(receiver, backpointer);
    Return(TrueConstant());
  }

  void DeleteDictionaryProperty(Node* receiver, Node* properties, Node* name,
                                Node* context, Label* dont_delete,
                                Label* notfound) {
@@ -328,8 +250,9 @@ TF_BUILTIN(DeleteProperty, DeletePropertyBaseAssembler) {
    Node* properties_map = LoadMap(properties);
    GotoIf(WordEqual(properties_map, LoadRoot(Heap::kHashTableMapRootIndex)),
           &dictionary);
    DeleteFastProperty(receiver, receiver_map, properties, unique, &dont_delete,
                       &if_notfound, &slow);
    // Fast properties need to clear recorded slots, which can only be done
    // in C++.
    Goto(&slow);
    BIND(&dictionary);
    {
......
@@ -336,12 +336,16 @@ void JSObject::JSObjectVerify() {
                                        properties()->length() -
                                        map()->NextFreePropertyIndex();
    if (map()->unused_property_fields() != actual_unused_property_fields) {
      // This could actually happen in the middle of StoreTransitionStub
      // when the new extended backing store is already set into the object and
      // the allocation of the MutableHeapNumber triggers GC (in this case map
      // is not updated yet).
      CHECK_EQ(map()->unused_property_fields(),
               actual_unused_property_fields - JSObject::kFieldsAdded);
      // There are two reasons why this can happen:
      // - in the middle of StoreTransitionStub when the new extended backing
      //   store is already set into the object and the allocation of the
      //   MutableHeapNumber triggers GC while the map isn't updated yet.
      // - deletion of the last property can leave additional backing store
      //   capacity behind.
      CHECK_GT(actual_unused_property_fields, map()->unused_property_fields());
      int delta =
          actual_unused_property_fields - map()->unused_property_fields();
      CHECK_EQ(0, delta % JSObject::kFieldsAdded);
    }
    DescriptorArray* descriptors = map()->instance_descriptors();
    Isolate* isolate = GetIsolate();
......
@@ -126,11 +126,70 @@ static MaybeHandle<Object> KeyedGetObjectProperty(Isolate* isolate,
  return Runtime::GetObjectProperty(isolate, receiver_obj, key_obj);
}

namespace {

bool DeleteObjectPropertyFast(Isolate* isolate, Handle<JSReceiver> receiver,
                              Handle<Object> raw_key) {
  // This implements a special case for fast property deletion: when the
  // last property in an object is deleted, then instead of normalizing
  // the properties, we can undo the last map transition, with a few
  // prerequisites:
  // (1) The receiver must be a regular object and the key a unique name.
  Map* map = receiver->map();
  if (map->IsSpecialReceiverMap()) return false;
  if (!raw_key->IsUniqueName()) return false;
  Handle<Name> key = Handle<Name>::cast(raw_key);
  // (2) The property to be deleted must be the last property.
  int nof = map->NumberOfOwnDescriptors();
  if (nof == 0) return false;
  int descriptor = nof - 1;
  DescriptorArray* descriptors = map->instance_descriptors();
  if (descriptors->GetKey(descriptor) != *key) return false;
  // (3) The property to be deleted must be deletable.
  PropertyDetails details = descriptors->GetDetails(descriptor);
  if (!details.IsConfigurable()) return false;
  // (4) The map must have a back pointer.
  Object* backpointer = map->GetBackPointer();
  if (!backpointer->IsMap()) return false;
  // (5) The last transition must have been caused by adding a property
  // (and not any kind of special transition).
  if (Map::cast(backpointer)->NumberOfOwnDescriptors() != nof - 1) return false;

  // Preconditions successful. No more bailouts after this point.
  // Zap the property to avoid keeping objects alive. Zapping is not necessary
  // for properties stored in the descriptor array.
  if (details.location() == kField) {
    Object* filler = isolate->heap()->one_pointer_filler_map();
    FieldIndex index = FieldIndex::ForPropertyIndex(map, details.field_index());
    JSObject::cast(*receiver)->RawFastPropertyAtPut(index, filler);
    // We must clear any recorded slot for the deleted property, because
    // subsequent object modifications might put a raw double there.
    // Slot clearing is the reason why this entire function cannot currently
    // be implemented in the DeleteProperty stub.
    if (index.is_inobject() && !map->IsUnboxedDoubleField(index)) {
      isolate->heap()->ClearRecordedSlot(
          *receiver, HeapObject::RawField(*receiver, index.offset()));
    }
  }
  // If the map was marked stable before, then there could be optimized code
  // that depends on the assumption that no object that reached this map
  // transitions away from it without triggering the "deoptimize dependent
  // code" mechanism.
  map->NotifyLeafMapLayoutChange();
  // Finally, perform the map rollback.
  receiver->synchronized_set_map(Map::cast(backpointer));
  return true;
}

}  // namespace

Maybe<bool> Runtime::DeleteObjectProperty(Isolate* isolate,
                                          Handle<JSReceiver> receiver,
                                          Handle<Object> key,
                                          LanguageMode language_mode) {
  if (DeleteObjectPropertyFast(isolate, receiver, key)) return Just(true);

  bool success = false;
  LookupIterator it = LookupIterator::PropertyOrElement(
      isolate, receiver, key, &success, LookupIterator::OWN);
......
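
A rough JS sketch (illustration only, not part of this change; the names are
invented) of the hazard that the ClearRecordedSlot call above guards against,
and that the second regression test below exercises: a deleted pointer-valued
field may later be reused for a raw double, and a stale recorded slot would
then let the GC misread those double bits as a heap pointer.

  function C() { this.tagged = {}; }  // the field initially holds a heap object
  var c = new C();
  delete c.tagged;  // fast path: map rollback plus field zap; the recorded
                    // slot for the old pointer must be cleared here
  c.boom = 1.5;     // a later store may put unboxed double bits into the
                    // same slot
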
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
function addProperties(o)
{
  o.p1 = 1;
  o.p2 = 2;
  o.p3 = 3;
  o.p4 = 4;
  o.p5 = 5;
  o.p6 = 6;
  o.p7 = 7;
  o.p8 = 8;
}

function removeProperties(o)
{
  delete o.p8;
  delete o.p7;
  delete o.p6;
  delete o.p5;
}

function makeO()
{
  var o = { };
  addProperties(o);
  removeProperties(o);
  addProperties(o);
}

for (var i = 0; i < 3; ++i) {
  o = makeO();
}
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --expose-gc --invoke-weak-callbacks
function __f_6() {
  this.a4 = {};
}
__v_6 = new __f_6();
__v_6.prototype = __v_6;
__v_6 = new __f_6();
gc();
gc();
buf = new ArrayBuffer(8);
__v_8 = new Int32Array(buf);
__v_9 = new Float64Array(buf);
__v_8[0] = 1;
__v_6.a4 = {a: 0};
delete __v_6.a4;
__v_6.boom = __v_9[0];