Commit dd49272c authored by ulan@chromium.org

Weak Cells

Introduce an object that holds a weak reference.
Design document: http://goo.gl/9dSvvy.

BUG=
R=erik.corry@gmail.com

Review URL: https://codereview.chromium.org/640303006

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24606 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 75fe010f
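
For orientation, here is a minimal sketch of the intended lifecycle, modeled on the TEST(WeakCell) cctest added at the end of this CL (illustrative only, not part of the change): the cell is created through the factory, its value is kept alive across scavenges, and a full mark-compact clears the value to undefined once nothing else references it.

// Sketch only, following TEST(WeakCell) below; assumes the cctest harness.
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
HandleScope scope(isolate);
Handle<HeapObject> object = factory->NewFixedArray(1, NOT_TENURED);
Handle<WeakCell> cell = factory->NewWeakCell(object);
CcTest::heap()->CollectGarbage(NEW_SPACE);
CHECK_EQ(*object, cell->value());  // scavenges do not clear weak cells
// After the last strong reference to the FixedArray is dropped:
//   CcTest::heap()->CollectAllAvailableGarbage();
//   CHECK(cell->value()->IsUndefined());  // mark-compact cleared the cell
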
......@@ -5927,7 +5927,7 @@ class Internals {
static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptyStringRootIndex = 153;
+ static const int kEmptyStringRootIndex = 154;
// The external allocation limit should be below 256 MB on all architectures
// to avoid that resource-constrained embedders run low on memory.
......@@ -5942,7 +5942,7 @@ class Internals {
static const int kNodeIsIndependentShift = 4;
static const int kNodeIsPartiallyDependentShift = 5;
- static const int kJSObjectType = 0xbc;
+ static const int kJSObjectType = 0xbd;
static const int kFirstNonstringType = 0x80;
static const int kOddballType = 0x83;
static const int kForeignType = 0x88;
......
......@@ -931,6 +931,13 @@ Handle<PropertyCell> Factory::NewPropertyCell(Handle<Object> value) {
}
Handle<WeakCell> Factory::NewWeakCell(Handle<HeapObject> value) {
AllowDeferredHandleDereference convert_to_cell;
CALL_HEAP_FUNCTION(isolate(), isolate()->heap()->AllocateWeakCell(*value),
WeakCell);
}
Handle<AllocationSite> Factory::NewAllocationSite() {
Handle<Map> map = allocation_site_map();
Handle<AllocationSite> site = New<AllocationSite>(map, OLD_POINTER_SPACE);
......
......@@ -296,6 +296,8 @@ class Factory FINAL {
Handle<PropertyCell> NewPropertyCell(Handle<Object> value);
Handle<WeakCell> NewWeakCell(Handle<HeapObject> value);
// Allocate a tenured AllocationSite. Its payload is null.
Handle<AllocationSite> NewAllocationSite();
......
......@@ -150,6 +150,7 @@ Heap::Heap()
set_array_buffers_list(Smi::FromInt(0));
set_allocation_sites_list(Smi::FromInt(0));
set_encountered_weak_collections(Smi::FromInt(0));
set_encountered_weak_cells(Smi::FromInt(0));
// Put a dummy entry in the remembered pages so we can find the list the
// minidump even if there are no real unmapped pages.
RememberUnmappedPage(NULL, false);
......@@ -1509,6 +1510,8 @@ void Heap::Scavenge() {
// Copy objects reachable from the encountered weak collections list.
scavenge_visitor.VisitPointer(&encountered_weak_collections_);
// Copy objects reachable from the encountered weak cells.
scavenge_visitor.VisitPointer(&encountered_weak_cells_);
// Copy objects reachable from the code flushing candidates list.
MarkCompactCollector* collector = mark_compact_collector();
......@@ -2559,6 +2562,7 @@ bool Heap::CreateInitialMaps() {
ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell)
ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
......@@ -2685,6 +2689,22 @@ AllocationResult Heap::AllocatePropertyCell() {
}
AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
int size = WeakCell::kSize;
STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
HeapObject* result;
{
AllocationResult allocation =
AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
if (!allocation.To(&result)) return allocation;
}
result->set_map_no_write_barrier(weak_cell_map());
WeakCell::cast(result)->initialize(value);
WeakCell::cast(result)->set_next(undefined_value(), SKIP_WRITE_BARRIER);
return result;
}
void Heap::CreateApiObjects() {
HandleScope scope(isolate());
Factory* factory = isolate()->factory();
......
......@@ -52,6 +52,7 @@ namespace internal {
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
V(Map, constant_pool_array_map, ConstantPoolArrayMap) \
V(Map, weak_cell_map, WeakCellMap) \
V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Map, hash_table_map, HashTableMap) \
V(Map, ordered_hash_table_map, OrderedHashTableMap) \
......@@ -223,6 +224,7 @@ namespace internal {
V(fixed_cow_array_map) \
V(fixed_double_array_map) \
V(constant_pool_array_map) \
V(weak_cell_map) \
V(no_interceptor_result_sentinel) \
V(hash_table_map) \
V(ordered_hash_table_map) \
......@@ -829,6 +831,11 @@ class Heap {
return encountered_weak_collections_;
}
void set_encountered_weak_cells(Object* weak_cell) {
encountered_weak_cells_ = weak_cell;
}
Object* encountered_weak_cells() const { return encountered_weak_cells_; }
// Number of mark-sweeps.
unsigned int ms_count() { return ms_count_; }
......@@ -1559,6 +1566,8 @@ class Heap {
// contains Smi(0) while marking is not active.
Object* encountered_weak_collections_;
Object* encountered_weak_cells_;
StoreBufferRebuilder store_buffer_rebuilder_;
struct StringTypeTable {
......@@ -1839,6 +1848,8 @@ class Heap {
// Allocate a tenured JS global property cell initialized with the hole.
MUST_USE_RESULT AllocationResult AllocatePropertyCell();
MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
// Allocates a new utility object in the old generation.
MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
......
......@@ -298,8 +298,12 @@ void MarkCompactCollector::CollectGarbage() {
if (FLAG_collect_maps) ClearNonLiveReferences();
ProcessAndClearWeakCells();
ClearWeakCollections();
heap_->set_encountered_weak_cells(Smi::FromInt(0));
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) {
VerifyMarking(heap_);
......@@ -822,6 +826,7 @@ void MarkCompactCollector::Prepare() {
heap()->incremental_marking()->Abort();
ClearMarkbits();
AbortWeakCollections();
AbortWeakCells();
AbortCompaction();
was_marked_incrementally_ = false;
}
......@@ -2734,6 +2739,37 @@ void MarkCompactCollector::AbortWeakCollections() {
}
void MarkCompactCollector::ProcessAndClearWeakCells() {
HeapObject* undefined = heap()->undefined_value();
Object* weak_cell_obj = heap()->encountered_weak_cells();
while (weak_cell_obj != Smi::FromInt(0)) {
WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
HeapObject* value = weak_cell->value();
if (!MarkCompactCollector::IsMarked(value)) {
weak_cell->clear(undefined);
} else {
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap()->mark_compact_collector()->RecordSlot(slot, slot, value);
}
weak_cell_obj = weak_cell->next();
weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
}
heap()->set_encountered_weak_cells(Smi::FromInt(0));
}
void MarkCompactCollector::AbortWeakCells() {
Object* undefined = heap()->undefined_value();
Object* weak_cell_obj = heap()->encountered_weak_cells();
while (weak_cell_obj != Smi::FromInt(0)) {
WeakCell* weak_cell = reinterpret_cast<WeakCell*>(weak_cell_obj);
weak_cell_obj = weak_cell->next();
weak_cell->set_next(undefined, SKIP_WRITE_BARRIER);
}
heap()->set_encountered_weak_cells(Smi::FromInt(0));
}
void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) {
if (heap_->InNewSpace(value)) {
heap_->store_buffer()->Mark(slot);
......@@ -2745,7 +2781,7 @@ void MarkCompactCollector::RecordMigratedSlot(Object* value, Address slot) {
}
- // We scavange new space simultaneously with sweeping. This is done in two
+ // We scavenge new space simultaneously with sweeping. This is done in two
// passes.
//
// The first pass migrates all alive objects from one semispace to another or
......
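
The mark-compact side above is a simple intrusive list: during marking, VisitWeakCell (further down in this CL) threads each newly encountered weak cell through its next field, with undefined as the "not enqueued" sentinel and Smi(0) as the list terminator, and ProcessAndClearWeakCells then walks that list, clearing any cell whose value was not marked. A freestanding sketch of the same pattern, using hypothetical stand-in types rather than V8's tagged objects:

#include <cassert>

// Freestanding sketch of VisitWeakCell / ProcessAndClearWeakCells above.
// Every name here is a hypothetical stand-in, not V8 code.
struct Obj { bool marked; };

struct Cell {
  Obj* value;   // weakly held object; nullptr plays the role of undefined
  Cell* next;   // intrusive list link
};

Cell not_enqueued_sentinel = {nullptr, nullptr};
Cell* const kNotEnqueued = &not_enqueued_sentinel;  // ~ undefined_value
Cell* const kListEnd = nullptr;                     // ~ Smi::FromInt(0)

Cell* encountered = kListEnd;  // ~ heap->encountered_weak_cells()

// Marking visitor: enqueue a cell at most once, skipping cleared cells.
void VisitCell(Cell* cell) {
  if (cell->next == kNotEnqueued && cell->value != nullptr) {
    cell->next = encountered;
    encountered = cell;
  }
}

// End of mark-compact: clear cells whose values were not marked and
// restore every next field to the "not enqueued" sentinel.
void ProcessAndClear() {
  Cell* cell = encountered;
  while (cell != kListEnd) {
    if (!cell->value->marked) cell->value = nullptr;
    Cell* next = cell->next;
    cell->next = kNotEnqueued;
    cell = next;
  }
  encountered = kListEnd;
}

int main() {
  Obj live = {true};
  Obj dead = {false};
  Cell a = {&live, kNotEnqueued};
  Cell b = {&dead, kNotEnqueued};
  VisitCell(&a);
  VisitCell(&b);
  VisitCell(&a);  // already enqueued: next is no longer the sentinel
  ProcessAndClear();
  assert(a.value == &live);    // marked value survives
  assert(b.value == nullptr);  // unmarked value was cleared
  return 0;
}
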
......@@ -826,6 +826,10 @@ class MarkCompactCollector {
// collections when incremental marking is aborted.
void AbortWeakCollections();
void ProcessAndClearWeakCells();
void AbortWeakCells();
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection.
......
......@@ -191,6 +191,8 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitPropertyCell, &VisitPropertyCell);
table_.Register(kVisitWeakCell, &VisitWeakCell);
table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
kVisitDataObjectGeneric>();
......@@ -349,6 +351,22 @@ void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
HeapObject* object) {
Heap* heap = map->GetHeap();
WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
Object* undefined = heap->undefined_value();
// Enqueue weak cell in the linked list of encountered weak cells.
// We can ignore weak cells with cleared values because they will always point
// to the undefined_value.
if (weak_cell->next() == undefined && weak_cell->value() != undefined) {
weak_cell->set_next(heap->encountered_weak_cells());
heap->set_encountered_weak_cells(weak_cell);
}
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
Map* map, HeapObject* object) {
......
......@@ -69,6 +69,9 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
case WEAK_CELL_TYPE:
return kVisitWeakCell;
case JS_SET_TYPE:
return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
JSSet::kSize);
......
......@@ -71,6 +71,7 @@ class StaticVisitorBase : public AllStatic {
V(Map) \
V(Cell) \
V(PropertyCell) \
V(WeakCell) \
V(SharedFunctionInfo) \
V(JSFunction) \
V(JSWeakCollection) \
......@@ -362,6 +363,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
}
INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
......
......@@ -125,6 +125,9 @@ void HeapObject::HeapObjectVerify() {
case PROPERTY_CELL_TYPE:
PropertyCell::cast(this)->PropertyCellVerify();
break;
case WEAK_CELL_TYPE:
WeakCell::cast(this)->WeakCellVerify();
break;
case JS_ARRAY_TYPE:
JSArray::cast(this)->JSArrayVerify();
break;
......@@ -635,6 +638,13 @@ void PropertyCell::PropertyCellVerify() {
}
void WeakCell::WeakCellVerify() {
CHECK(IsWeakCell());
VerifyObjectField(kValueOffset);
VerifyObjectField(kNextOffset);
}
void Code::CodeVerify() {
CHECK(IsAligned(reinterpret_cast<intptr_t>(instruction_start()),
kCodeAlignment));
......
......@@ -786,6 +786,7 @@ TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
......@@ -1929,6 +1930,33 @@ void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
}
HeapObject* WeakCell::value() const {
return HeapObject::cast(READ_FIELD(this, kValueOffset));
}
void WeakCell::clear(HeapObject* undefined) {
WRITE_FIELD(this, kValueOffset, undefined);
}
void WeakCell::initialize(HeapObject* val) {
WRITE_FIELD(this, kValueOffset, val);
WRITE_BARRIER(GetHeap(), this, kValueOffset, val);
}
Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
WRITE_FIELD(this, kNextOffset, val);
if (mode == UPDATE_WRITE_BARRIER) {
WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
}
}
int JSObject::GetHeaderSize() {
InstanceType type = map()->instance_type();
// Check for the most common kind of JavaScript object before
......@@ -3253,6 +3281,7 @@ CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakHashTable)
......
......@@ -169,6 +169,9 @@ void HeapObject::HeapObjectPrint(std::ostream& os) { // NOLINT
case PROPERTY_CELL_TYPE:
PropertyCell::cast(this)->PropertyCellPrint(os);
break;
case WEAK_CELL_TYPE:
WeakCell::cast(this)->WeakCellPrint(os);
break;
case JS_ARRAY_BUFFER_TYPE:
JSArrayBuffer::cast(this)->JSArrayBufferPrint(os);
break;
......@@ -862,6 +865,11 @@ void PropertyCell::PropertyCellPrint(std::ostream& os) { // NOLINT
}
void WeakCell::WeakCellPrint(std::ostream& os) { // NOLINT
HeapObject::PrintHeader(os, "WeakCell");
}
void Code::CodePrint(std::ostream& os) { // NOLINT
HeapObject::PrintHeader(os, "Code");
#ifdef ENABLE_DISASSEMBLER
......
......@@ -1624,6 +1624,9 @@ void HeapObject::IterateBody(InstanceType type, int object_size,
case PROPERTY_CELL_TYPE:
PropertyCell::BodyDescriptor::IterateBody(this, v);
break;
case WEAK_CELL_TYPE:
WeakCell::BodyDescriptor::IterateBody(this, v);
break;
case SYMBOL_TYPE:
Symbol::BodyDescriptor::IterateBody(this, v);
break;
......
......@@ -143,6 +143,7 @@
// - DebugInfo
// - BreakPointInfo
// - CodeCache
// - WeakCell
//
// Formats of Object*:
// Smi: [31 bit signed int] 0
......@@ -425,6 +426,7 @@ const int kStubMinorKeyBits = kSmiValueSize - kStubMajorKeyBits - 1;
V(FIXED_DOUBLE_ARRAY_TYPE) \
V(CONSTANT_POOL_ARRAY_TYPE) \
V(SHARED_FUNCTION_INFO_TYPE) \
V(WEAK_CELL_TYPE) \
\
V(JS_MESSAGE_OBJECT_TYPE) \
\
......@@ -721,6 +723,7 @@ enum InstanceType {
FIXED_ARRAY_TYPE,
CONSTANT_POOL_ARRAY_TYPE,
SHARED_FUNCTION_INFO_TYPE,
WEAK_CELL_TYPE,
// All the following types are subtypes of JSReceiver, which corresponds to
// objects in the JS sense. The first and the last type in this range are
......@@ -984,6 +987,7 @@ template <class C> inline bool Is(Object* obj);
V(AccessCheckNeeded) \
V(Cell) \
V(PropertyCell) \
V(WeakCell) \
V(ObjectHashTable) \
V(WeakHashTable) \
V(OrderedHashTable)
......@@ -9567,6 +9571,35 @@ class PropertyCell: public Cell {
};
class WeakCell : public HeapObject {
public:
inline HeapObject* value() const;
// This should not be called by anyone except GC.
inline void clear(HeapObject* undefined);
// This should not be called by anyone except allocator.
inline void initialize(HeapObject* value);
DECL_ACCESSORS(next, Object)
DECLARE_CAST(WeakCell)
DECLARE_PRINTER(WeakCell)
DECLARE_VERIFIER(WeakCell)
// Layout description.
static const int kValueOffset = HeapObject::kHeaderSize;
static const int kNextOffset = kValueOffset + kPointerSize;
static const int kSize = kNextOffset + kPointerSize;
typedef FixedBodyDescriptor<kValueOffset, kSize, kSize> BodyDescriptor;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(WeakCell);
};
// The JSProxy describes EcmaScript Harmony proxies
class JSProxy: public JSReceiver {
public:
......
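
For reference, the offsets declared above make a weak cell three pointer-sized words; a rough sketch of the shape (an assumption about the slot layout for illustration, not V8 source):

// word 0: map    -- HeapObject header, set to weak_cell_map on allocation
// word 1: value  -- kValueOffset: the weakly held object, undefined once cleared
// word 2: next   -- kNextOffset: GC-internal link for the encountered-weak-cells list
// kSize = kNextOffset + kPointerSize = 3 * kPointerSize;
// BodyDescriptor iterates the pointer slots in [kValueOffset, kSize).
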
......@@ -4283,6 +4283,72 @@ TEST(WeakMapInMonomorphicCompareNilIC) {
}
TEST(WeakCell) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
v8::internal::Factory* factory = isolate->factory();
HandleScope outer_scope(isolate);
Handle<WeakCell> weak_cell1;
{
HandleScope inner_scope(isolate);
Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
}
Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
Handle<WeakCell> weak_cell2;
{
HandleScope inner_scope(isolate);
weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
}
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
heap->CollectGarbage(NEW_SPACE);
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
heap->CollectGarbage(NEW_SPACE);
CHECK(weak_cell1->value()->IsFixedArray());
CHECK_EQ(*survivor, weak_cell2->value());
heap->CollectAllAvailableGarbage();
CHECK_EQ(*survivor, weak_cell2->value());
CHECK(weak_cell2->value()->IsFixedArray());
}
TEST(WeakCellsWithIncrementalMarking) {
CcTest::InitializeVM();
Isolate* isolate = CcTest::i_isolate();
v8::internal::Heap* heap = CcTest::heap();
v8::internal::Factory* factory = isolate->factory();
const int N = 16;
HandleScope outer_scope(isolate);
Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
Handle<WeakCell> weak_cells[N];
for (int i = 0; i < N; i++) {
HandleScope inner_scope(isolate);
Handle<HeapObject> value =
i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
CHECK(weak_cell->value()->IsFixedArray());
IncrementalMarking* marking = heap->incremental_marking();
if (marking->IsStopped()) marking->Start();
marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
heap->CollectGarbage(NEW_SPACE);
CHECK(weak_cell->value()->IsFixedArray());
weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
}
heap->CollectAllGarbage(Heap::kNoGCFlags);
CHECK_EQ(*survivor, weak_cells[0]->value());
for (int i = 1; i < N; i++) {
CHECK(weak_cells[i]->value()->IsUndefined());
}
}
#ifdef DEBUG
TEST(AddInstructionChangesNewSpacePromotion) {
i::FLAG_allow_natives_syntax = true;
......