Commit b79b9859 authored by ulan, committed by Commit bot

Use weak cells in dependent code.

BUG=

Review URL: https://codereview.chromium.org/871253005

Cr-Commit-Position: refs/heads/master@{#26614}
parent 9159c419
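In essence: dependent-code lists used to hold Code objects and CompilationInfo pointers directly, with special GC visitor code to keep those slots from strongly retaining the code; after this change every entry is a WeakCell, so the lists can be visited as ordinary strong pointers and the GC clears dead entries itself. A minimal sketch of the pattern, with std::weak_ptr standing in for V8's WeakCell (illustrative names, not V8 API):

#include <algorithm>
#include <memory>
#include <vector>

struct Code { int id; };

// Each dependency entry holds its code weakly: when the code object dies,
// the entry reads as cleared instead of keeping the code alive.
struct DependentCodeList {
  std::vector<std::weak_ptr<Code>> entries;

  void Insert(const std::shared_ptr<Code>& code) {
    entries.push_back(code);  // stores a weak reference only
  }

  // Drop cleared entries, in the spirit of the new DependentCode::Compact().
  void Compact() {
    entries.erase(std::remove_if(entries.begin(), entries.end(),
                                 [](const std::weak_ptr<Code>& e) {
                                   return e.expired();
                                 }),
                  entries.end());
  }
};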
......@@ -6253,7 +6253,7 @@ class Internals {
static const int kNullValueRootIndex = 7;
static const int kTrueValueRootIndex = 8;
static const int kFalseValueRootIndex = 9;
- static const int kEmptyStringRootIndex = 154;
+ static const int kEmptyStringRootIndex = 155;
// The external allocation limit should be below 256 MB on all architectures
// to avoid that resource-constrained embedders run low on memory.
......
......@@ -206,6 +206,16 @@ CompilationInfo::~CompilationInfo() {
void CompilationInfo::CommitDependencies(Handle<Code> code) {
+ bool has_dependencies = false;
+ for (int i = 0; i < DependentCode::kGroupCount; i++) {
+ has_dependencies |=
+ dependencies_[i] != NULL && dependencies_[i]->length() > 0;
+ }
+ // Avoid creating a weak cell for code with no dependencies.
+ if (!has_dependencies) return;
+ AllowDeferredHandleDereference get_object_wrapper;
+ Handle<WeakCell> cell = Code::WeakCellFor(code);
for (int i = 0; i < DependentCode::kGroupCount; i++) {
ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
if (group_objects == NULL) continue;
......@@ -215,7 +225,7 @@ void CompilationInfo::CommitDependencies(Handle<Code> code) {
static_cast<DependentCode::DependencyGroup>(i);
DependentCode* dependent_code =
DependentCode::ForObject(group_objects->at(j), group);
- dependent_code->UpdateToFinishedCode(group, this, *code);
+ dependent_code->UpdateToFinishedCode(group, *object_wrapper(), *cell);
}
dependencies_[i] = NULL; // Zone-allocated, no need to delete.
}
......@@ -223,6 +233,7 @@ void CompilationInfo::CommitDependencies(Handle<Code> code) {
void CompilationInfo::RollbackDependencies() {
+ AllowDeferredHandleDereference get_object_wrapper;
// Unregister from all dependent maps if not yet committed.
for (int i = 0; i < DependentCode::kGroupCount; i++) {
ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
......@@ -232,7 +243,7 @@ void CompilationInfo::RollbackDependencies() {
static_cast<DependentCode::DependencyGroup>(i);
DependentCode* dependent_code =
DependentCode::ForObject(group_objects->at(j), group);
- dependent_code->RemoveCompilationInfo(group, this);
+ dependent_code->RemoveCompilationInfo(group, *object_wrapper());
}
dependencies_[i] = NULL; // Zone-allocated, no need to delete.
}
......
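CommitDependencies above creates one WeakCell per finished Code and reuses it across all dependency groups; Code::WeakCellFor together with the verifier's CachedWeakCell (further down in this diff) suggests the cell is cached on the code object so repeated registrations share it. A hedged sketch of such a memoized factory, with std::weak_ptr standing in for WeakCell (in V8 a WeakCell is a separate heap allocation, which is what makes caching worthwhile; here it is purely an illustration of the pattern):

#include <memory>

struct Code {
  // Cache so repeated requests return the same weak reference,
  // analogous to the assumed Code::WeakCellFor / CachedWeakCell pairing.
  std::weak_ptr<Code> cached_cell;
};

std::weak_ptr<Code> WeakCellFor(const std::shared_ptr<Code>& code) {
  if (code->cached_cell.expired()) code->cached_cell = code;
  return code->cached_cell;
}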
......@@ -135,9 +135,6 @@ Heap::Heap()
full_codegen_bytes_generated_(0),
crankshaft_codegen_bytes_generated_(0),
gcs_since_last_deopt_(0),
- #ifdef VERIFY_HEAP
- no_weak_object_verification_scope_depth_(0),
- #endif
allocation_sites_scratchpad_length_(0),
promotion_queue_(this),
configured_(false),
......@@ -3169,6 +3166,10 @@ void Heap::CreateInitialObjects() {
set_detached_contexts(empty_fixed_array());
+ set_weak_object_to_code_table(
+ *WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
+ TENURED));
Handle<SeededNumberDictionary> slow_element_dictionary =
SeededNumberDictionary::New(isolate(), 0, TENURED);
slow_element_dictionary->set_requires_slow_elements();
......@@ -5600,7 +5601,6 @@ bool Heap::CreateHeapObjects() {
set_array_buffers_list(undefined_value());
set_new_array_buffer_views_list(undefined_value());
set_allocation_sites_list(undefined_value());
- weak_object_to_code_table_ = undefined_value();
return true;
}
......@@ -5775,41 +5775,25 @@ void Heap::RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback) {
// TODO(ishell): Find a better place for this.
- void Heap::AddWeakObjectToCodeDependency(Handle<Object> obj,
+ void Heap::AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
Handle<DependentCode> dep) {
DCHECK(!InNewSpace(*obj));
DCHECK(!InNewSpace(*dep));
- // This handle scope keeps the table handle local to this function, which
- // allows us to safely skip write barriers in table update operations.
- HandleScope scope(isolate());
- Handle<WeakHashTable> table(WeakHashTable::cast(weak_object_to_code_table_),
- isolate());
+ Handle<WeakHashTable> table(weak_object_to_code_table(), isolate());
table = WeakHashTable::Put(table, obj, dep);
- if (ShouldZapGarbage() && weak_object_to_code_table_ != *table) {
- WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value());
- }
- set_weak_object_to_code_table(*table);
- DCHECK_EQ(*dep, table->Lookup(obj));
+ if (*table != weak_object_to_code_table())
+ set_weak_object_to_code_table(*table);
+ DCHECK_EQ(*dep, LookupWeakObjectToCodeDependency(obj));
}
- DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<Object> obj) {
- Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
+ DependentCode* Heap::LookupWeakObjectToCodeDependency(Handle<HeapObject> obj) {
+ Object* dep = weak_object_to_code_table()->Lookup(obj);
if (dep->IsDependentCode()) return DependentCode::cast(dep);
return DependentCode::cast(empty_fixed_array());
}
- void Heap::EnsureWeakObjectToCodeTable() {
- if (!weak_object_to_code_table()->IsHashTable()) {
- set_weak_object_to_code_table(
- *WeakHashTable::New(isolate(), 16, USE_DEFAULT_MINIMUM_CAPACITY,
- TENURED));
- }
- }
void Heap::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot);
}
......
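With weak_object_to_code_table now an ordinary strong root, AddWeakObjectToCodeDependency above reduces to: Put may return a reallocated table, and the root is re-pointed only when the identity actually changed. The same copy-on-grow pattern in a self-contained sketch (hypothetical names and standard containers, not V8 API):

#include <memory>
#include <utility>
#include <vector>

// Stand-in for WeakHashTable: Put returns a (possibly new) table, like
// WeakHashTable::Put returning a reallocated handle when it had to grow.
using Table = std::vector<std::pair<int, int>>;

std::shared_ptr<Table> Put(const std::shared_ptr<Table>& table,
                           int key, int value) {
  if (table->size() < 4) {  // pretend 4 is the current capacity
    table->emplace_back(key, value);
    return table;           // fit in place: same table back
  }
  auto grown = std::make_shared<Table>(*table);  // "reallocate" by copying
  grown->emplace_back(key, value);
  return grown;
}

struct Heap {
  std::shared_ptr<Table> weak_object_to_code_table =
      std::make_shared<Table>();

  void AddWeakObjectToCodeDependency(int key, int value) {
    auto table = Put(weak_object_to_code_table, key, value);
    if (table != weak_object_to_code_table)  // re-point the root on growth only
      weak_object_to_code_table = table;
  }
};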
......@@ -182,7 +182,8 @@ namespace internal {
V(FixedArray, allocation_sites_scratchpad, AllocationSitesScratchpad) \
V(FixedArray, microtask_queue, MicrotaskQueue) \
V(FixedArray, keyed_load_dummy_vector, KeyedLoadDummyVector) \
- V(FixedArray, detached_contexts, DetachedContexts)
+ V(FixedArray, detached_contexts, DetachedContexts) \
+ V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable)
// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
......@@ -873,8 +874,6 @@ class Heap {
// Used in CreateAllocationSiteStub and the (de)serializer.
Object** allocation_sites_list_address() { return &allocation_sites_list_; }
- Object* weak_object_to_code_table() { return weak_object_to_code_table_; }
void set_encountered_weak_collections(Object* weak_collection) {
encountered_weak_collections_ = weak_collection;
}
......@@ -979,11 +978,6 @@ class Heap {
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
- bool weak_embedded_objects_verification_enabled() {
- return no_weak_object_verification_scope_depth_ == 0;
- }
#endif
#ifdef DEBUG
......@@ -1439,16 +1433,10 @@ class Heap {
Heap* heap_;
};
- void AddWeakObjectToCodeDependency(Handle<Object> obj,
+ void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
Handle<DependentCode> dep);
- DependentCode* LookupWeakObjectToCodeDependency(Handle<Object> obj);
- void InitializeWeakObjectToCodeTable() {
- set_weak_object_to_code_table(undefined_value());
- }
- void EnsureWeakObjectToCodeTable();
+ DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);
static void FatalProcessOutOfMemory(const char* location,
bool take_snapshot = false);
......@@ -1645,11 +1633,6 @@ class Heap {
// array buffer.
Object* new_array_buffer_views_list_;
- // WeakHashTable that maps objects embedded in optimized code to dependent
- // code list. It is initialized lazily and contains the undefined_value at
- // start.
- Object* weak_object_to_code_table_;
// List of encountered weak collections (JSWeakMap and JSWeakSet) during
// marking. It is initialized during marking, destroyed after marking and
// contains Smi(0) while marking is not active.
......@@ -2067,15 +2050,6 @@ class Heap {
void ClearObjectStats(bool clear_last_time_stats = false);
- void set_weak_object_to_code_table(Object* value) {
- DCHECK(!InNewSpace(value));
- weak_object_to_code_table_ = value;
- }
- Object** weak_object_to_code_table_address() {
- return &weak_object_to_code_table_;
- }
inline void UpdateAllocationsHash(HeapObject* object);
inline void UpdateAllocationsHash(uint32_t value);
inline void PrintAlloctionsHash();
......@@ -2127,10 +2101,6 @@ class Heap {
// deoptimization triggered by garbage collection.
int gcs_since_last_deopt_;
- #ifdef VERIFY_HEAP
- int no_weak_object_verification_scope_depth_;
- #endif
static const int kAllocationSiteScratchpadSize = 256;
int allocation_sites_scratchpad_length_;
......
......@@ -552,8 +552,6 @@ void IncrementalMarking::StartMarking(CompactionFlag flag) {
IncrementalMarkingRootMarkingVisitor visitor(this);
heap_->IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
- heap_->mark_compact_collector()->MarkWeakObjectToCodeTable();
// Ready to start incremental marking.
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] Running\n");
......
......@@ -656,10 +656,6 @@ class MarkCompactCollector {
bool evacuation() const { return evacuation_; }
- // Mark the global table which maps weak objects to dependent code without
- // marking its contents.
- void MarkWeakObjectToCodeTable();
// Special case for processing weak references in a full collection. We need
// to artificially keep AllocationSites alive for a time.
void MarkAllocationSite(AllocationSite* site);
......@@ -818,11 +814,6 @@ class MarkCompactCollector {
int number_of_own_descriptors);
void TrimEnumCache(Map* map, DescriptorArray* descriptors);
- void ClearDependentCode(DependentCode* dependent_code);
- void ClearNonLiveDependentCode(DependentCode* dependent_code);
- int ClearNonLiveDependentCodeInGroup(DependentCode* dependent_code, int group,
- int start, int end, int new_start);
// Mark all values associated with reachable keys in weak collections
// encountered so far. This might push new object or even new weak maps onto
// the marking stack.
......
......@@ -326,19 +326,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- Object** slot =
- HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
- if (FLAG_collect_maps) {
- // Mark property cell dependent codes array but do not push it onto marking
- // stack, this will make references from it weak. We will clean dead
- // codes when we iterate over property cells in ClearNonLiveReferences.
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
- } else {
- StaticVisitor::VisitPointer(heap, slot);
- }
StaticVisitor::VisitPointers(
heap,
HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
......@@ -367,20 +354,6 @@ void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- Object** slot =
- HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
- if (FLAG_collect_maps) {
- // Mark allocation site dependent codes array but do not push it onto
- // marking stack, this will make references from it weak. We will clean
- // dead codes when we iterate over allocation sites in
- // ClearNonLiveReferences.
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
- } else {
- StaticVisitor::VisitPointer(heap, slot);
- }
StaticVisitor::VisitPointers(
heap,
HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
......@@ -643,14 +616,6 @@ void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
descriptors->GetDescriptorEndSlot(end));
}
- // Mark prototype dependent codes array but do not push it onto marking
- // stack, this will make references from it weak. We will clean dead
- // codes when we iterate over maps in ClearNonLiveTransitions.
- Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
- HeapObject* obj = HeapObject::cast(*slot);
- heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
- StaticVisitor::MarkObjectWithoutPush(heap, obj);
// Mark the pointer fields of the Map. Since the transitions array has
// been marked already, it is fine that one of these fields contains a
// pointer to it.
......
......@@ -414,12 +414,12 @@ Representation LChunk::LookupLiteralRepresentation(
static void AddWeakObjectToCodeDependency(Isolate* isolate,
- Handle<Object> object,
+ Handle<HeapObject> object,
Handle<Code> code) {
+ Handle<WeakCell> cell = Code::WeakCellFor(code);
Heap* heap = isolate->heap();
- heap->EnsureWeakObjectToCodeTable();
Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
- dep = DependentCode::Insert(dep, DependentCode::kWeakCodeGroup, code);
+ dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
heap->AddWeakObjectToCodeDependency(object, dep);
}
......@@ -462,6 +462,9 @@ void LChunk::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const {
void LChunk::CommitDependencies(Handle<Code> code) const {
+ if (!code->is_optimized_code()) return;
+ HandleScope scope(isolate());
for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
iend = deprecation_dependencies_.end(); it != iend; ++it) {
Handle<Map> map = *it;
......@@ -479,7 +482,7 @@ void LChunk::CommitDependencies(Handle<Code> code) const {
}
info_->CommitDependencies(code);
- if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
+ RegisterWeakObjectsInOptimizedCode(code);
}
......
......@@ -677,6 +677,7 @@ void Code::CodeVerify() {
void Code::VerifyEmbeddedObjectsDependency() {
if (!CanContainWeakObjects()) return;
+ WeakCell* cell = CachedWeakCell();
DisallowHeapAllocation no_gc;
Isolate* isolate = GetIsolate();
HandleScope scope(isolate);
......@@ -687,13 +688,13 @@ void Code::VerifyEmbeddedObjectsDependency() {
if (obj->IsMap()) {
Map* map = Map::cast(obj);
CHECK(map->dependent_code()->Contains(DependentCode::kWeakCodeGroup,
- this));
+ cell));
} else if (obj->IsJSObject()) {
- Object* raw_table = GetIsolate()->heap()->weak_object_to_code_table();
- WeakHashTable* table = WeakHashTable::cast(raw_table);
- Handle<Object> key_obj(obj, isolate);
- CHECK(DependentCode::cast(table->Lookup(key_obj))->Contains(
- DependentCode::kWeakCodeGroup, this));
+ WeakHashTable* table =
+ GetIsolate()->heap()->weak_object_to_code_table();
+ Handle<HeapObject> key_obj(HeapObject::cast(obj), isolate);
+ CHECK(DependentCode::cast(table->Lookup(key_obj))
+ ->Contains(DependentCode::kWeakCodeGroup, cell));
}
}
}
......
......@@ -4766,21 +4766,6 @@ void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
}
- bool DependentCode::is_code_at(int i) {
- return get(kCodesStartIndex + i)->IsCode();
- }
- Code* DependentCode::code_at(int i) {
- return Code::cast(get(kCodesStartIndex + i));
- }
- CompilationInfo* DependentCode::compilation_info_at(int i) {
- return reinterpret_cast<CompilationInfo*>(
- Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
- }
void DependentCode::set_object_at(int i, Object* object) {
set(kCodesStartIndex + i, object);
}
......@@ -4791,11 +4776,6 @@ Object* DependentCode::object_at(int i) {
}
- Object** DependentCode::slot_at(int i) {
- return RawFieldOfElementAt(kCodesStartIndex + i);
- }
void DependentCode::clear_at(int i) {
set_undefined(kCodesStartIndex + i);
}
......@@ -7228,13 +7208,18 @@ Handle<ObjectHashTable> ObjectHashTable::Shrink(
template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
- return key->SameValue(other);
+ if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
+ return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
+ : *key == other;
}
template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
- intptr_t hash = reinterpret_cast<intptr_t>(*key);
+ intptr_t hash =
+ key->IsWeakCell()
+ ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
+ : reinterpret_cast<intptr_t>(*key);
return (uint32_t)(hash & 0xFFFFFFFF);
}
......@@ -7242,6 +7227,7 @@ uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
template <int entrysize>
uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
Object* other) {
+ if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
intptr_t hash = reinterpret_cast<intptr_t>(other);
return (uint32_t)(hash & 0xFFFFFFFF);
}
......
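The shape functions above make the weak wrapping invisible to lookups: stored keys are WeakCells, probe keys are plain heap objects, and IsMatch and both hash functions unwrap a cell to its value so a raw key lands in the same bucket as its cell. A compact analogue using std::weak_ptr (illustrative sketch, not the V8 code):

#include <cstddef>
#include <functional>
#include <memory>

struct Object {};
using Cell = std::weak_ptr<Object>;  // stands in for WeakCell

// Probe keys are plain objects, stored keys are cells; hashing both by the
// identity of the underlying object puts them in the same bucket.
std::size_t HashKey(const std::shared_ptr<Object>& key) {
  return std::hash<const Object*>()(key.get());
}
std::size_t HashEntry(const Cell& cell) {
  return std::hash<const Object*>()(cell.lock().get());  // null once cleared
}

// IsMatch unwraps the stored cell, then compares object identity.
bool IsMatch(const std::shared_ptr<Object>& key, const Cell& cell) {
  return cell.lock().get() == key.get();
}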
......@@ -850,6 +850,7 @@ class ConsString;
class DictionaryElementsAccessor;
class ElementsAccessor;
class FixedArrayBase;
+ class FunctionLiteral;
class GlobalObject;
class LayoutDescriptor;
class LookupIterator;
......@@ -857,7 +858,6 @@ class ObjectVisitor;
class StringStream;
class TypeFeedbackVector;
class WeakCell;
- class FunctionLiteral;
// We cannot just say "class HeapType;" if it is created from a template... =8-?
template<class> class TypeImpl;
......@@ -4049,9 +4049,9 @@ class WeakHashTableShape : public BaseShape<Handle<Object> > {
};
- // WeakHashTable maps keys that are arbitrary objects to object values.
- // It is used for the global weak hash table that maps objects
- // embedded in optimized code to dependent code lists.
+ // WeakHashTable maps keys that are arbitrary heap objects to heap object
+ // values. The table wraps the keys in weak cells and stores values directly.
+ // Thus it references keys weakly and values strongly.
class WeakHashTable: public HashTable<WeakHashTable,
WeakHashTableShape<2>,
Handle<Object> > {
......@@ -4062,27 +4062,18 @@ class WeakHashTable: public HashTable<WeakHashTable,
// Looks up the value associated with the given key. The hole value is
// returned in case the key is not present.
- Object* Lookup(Handle<Object> key);
+ Object* Lookup(Handle<HeapObject> key);
// Adds (or overwrites) the value associated with the given key. Mapping a
// key to the hole value causes removal of the whole entry.
MUST_USE_RESULT static Handle<WeakHashTable> Put(Handle<WeakHashTable> table,
- Handle<Object> key,
- Handle<Object> value);
- // This function is called when heap verification is turned on.
- void Zap(Object* value) {
- int capacity = Capacity();
- for (int i = 0; i < capacity; i++) {
- set(EntryToIndex(i), value);
- set(EntryToValueIndex(i), value);
- }
- }
+ Handle<HeapObject> key,
+ Handle<HeapObject> value);
private:
friend class MarkCompactCollector;
- void AddEntry(int entry, Handle<Object> key, Handle<Object> value);
+ void AddEntry(int entry, Handle<WeakCell> key, Handle<HeapObject> value);
// Returns the index to the value of an entry.
static inline int EntryToValueIndex(int entry) {
......@@ -5543,9 +5534,9 @@ class CompilationInfo;
//
// The first n elements are Smis, each of them specifies the number of codes
// in the corresponding group. The subsequent elements contain grouped code
- // objects. The suffix of the array can be filled with the undefined value if
- // the number of codes is less than the length of the array. The order of the
- // code objects within a group is not preserved.
+ // objects in weak cells. The suffix of the array can be filled with the
+ // undefined value if the number of codes is less than the length of the
+ // array. The order of the code objects within a group is not preserved.
//
// All code indexes used in the class are counted starting from the first
// code object of the first group. In other words, code index 0 corresponds
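Concretely, the array is laid out as [n0, ..., n(kGroupCount-1), entries of group 0, entries of group 1, ...], and GroupStartIndexes accumulates the per-group counts into offsets. A small worked example of that indexing (simplified sketch, not the V8 code):

#include <cassert>
#include <vector>

// Layout: counts[0..kGroupCount) followed by the grouped entries.
const int kGroupCount = 3;

int StartIndexOfGroup(const std::vector<int>& counts, int group) {
  int start = 0;  // entry indexes count from the first entry of group 0
  for (int g = 0; g < group; g++) start += counts[g];
  return start;
}

int main() {
  // 2 entries in group 0, 0 in group 1, 3 in group 2.
  std::vector<int> counts = {2, 0, 3};
  assert(static_cast<int>(counts.size()) == kGroupCount);
  assert(StartIndexOfGroup(counts, 0) == 0);
  assert(StartIndexOfGroup(counts, 1) == 2);
  assert(StartIndexOfGroup(counts, 2) == 2);
  // In the FixedArray itself, entry i of the whole list lives at
  // kCodesStartIndex + i == kGroupCount + i.
}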
......@@ -5599,15 +5590,21 @@ class DependentCode: public FixedArray {
int start_indexes_[kGroupCount + 1];
};
- bool Contains(DependencyGroup group, Code* code);
- static Handle<DependentCode> Insert(Handle<DependentCode> entries,
- DependencyGroup group,
- Handle<Object> object);
- void UpdateToFinishedCode(DependencyGroup group,
- CompilationInfo* info,
- Code* code);
+ bool Contains(DependencyGroup group, WeakCell* code_cell);
+ static Handle<DependentCode> InsertCompilationInfo(
+ Handle<DependentCode> entries, DependencyGroup group,
+ Handle<Foreign> info);
+ static Handle<DependentCode> InsertWeakCode(Handle<DependentCode> entries,
+ DependencyGroup group,
+ Handle<WeakCell> code_cell);
+ void UpdateToFinishedCode(DependencyGroup group, Foreign* info,
+ WeakCell* code_cell);
void RemoveCompilationInfo(DependentCode::DependencyGroup group,
- CompilationInfo* info);
+ Foreign* info);
void DeoptimizeDependentCodeGroup(Isolate* isolate,
DependentCode::DependencyGroup group);
......@@ -5619,12 +5616,8 @@ class DependentCode: public FixedArray {
// and the mark compact collector.
inline int number_of_entries(DependencyGroup group);
inline void set_number_of_entries(DependencyGroup group, int value);
- inline bool is_code_at(int i);
- inline Code* code_at(int i);
- inline CompilationInfo* compilation_info_at(int i);
- inline void set_object_at(int i, Object* object);
- inline Object** slot_at(int i);
inline Object* object_at(int i);
+ inline void set_object_at(int i, Object* object);
inline void clear_at(int i);
inline void copy(int from, int to);
DECLARE_CAST(DependentCode)
......@@ -5636,9 +5629,20 @@ class DependentCode: public FixedArray {
static void SetMarkedForDeoptimization(Code* code, DependencyGroup group);
private:
+ static Handle<DependentCode> Insert(Handle<DependentCode> entries,
+ DependencyGroup group,
+ Handle<Object> object);
static Handle<DependentCode> EnsureSpace(Handle<DependentCode> entries);
// Make a room at the end of the given group by moving out the first
// code objects of the subsequent groups.
inline void ExtendGroup(DependencyGroup group);
+ // Compact by removing cleared weak cells and return true if there was
+ // any cleared weak cell.
+ bool Compact();
+ static int Grow(int number_of_entries) {
+ if (number_of_entries < 5) return number_of_entries + 1;
+ return number_of_entries * 5 / 4;
+ }
static const int kCodesStartIndex = kGroupCount;
};
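The new private Grow helper grows the entry array one slot at a time while it is small and by a factor of 5/4 afterwards, so short lists stay tight while larger ones grow geometrically. A quick check of the resulting size sequence:

#include <cstdio>

// Same policy as DependentCode::Grow above.
static int Grow(int number_of_entries) {
  if (number_of_entries < 5) return number_of_entries + 1;
  return number_of_entries * 5 / 4;  // integer division
}

int main() {
  int n = 1;
  for (int i = 0; i < 10; i++) {
    printf("%d ", n);
    n = Grow(n);
  }
  // Prints: 1 2 3 4 5 6 7 8 10 12
  printf("\n");
}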
......@@ -8473,7 +8477,7 @@ class AllocationSite: public Struct {
// During mark compact we need to take special care for the dependent code
// field.
static const int kPointerFieldsBeginOffset = kTransitionInfoOffset;
- static const int kPointerFieldsEndOffset = kDependentCodeOffset;
+ static const int kPointerFieldsEndOffset = kWeakNextOffset;
// For other visitors, use the fixed body descriptor below.
typedef FixedBodyDescriptor<HeapObject::kHeaderSize,
......@@ -9741,7 +9745,7 @@ class PropertyCell: public Cell {
static const int kSize = kDependentCodeOffset + kPointerSize;
static const int kPointerFieldsBeginOffset = kValueOffset;
- static const int kPointerFieldsEndOffset = kDependentCodeOffset;
+ static const int kPointerFieldsEndOffset = kSize;
typedef FixedBodyDescriptor<kValueOffset,
kSize,
......
......@@ -682,8 +682,6 @@ void Deserializer::Deserialize(Isolate* isolate) {
isolate_->heap()->undefined_value());
}
- isolate_->heap()->InitializeWeakObjectToCodeTable();
// Update data pointers to the external strings containing natives sources.
for (int i = 0; i < Natives::GetBuiltinsCount(); i++) {
Object* source = isolate_->heap()->natives_source_cache()->get(i);
......
......@@ -4068,8 +4068,9 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
DependentCode::GroupStartIndexes starts(site->dependent_code());
CHECK_GE(starts.number_of_entries(), 1);
int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
- CHECK(site->dependent_code()->is_code_at(index));
- Code* function_bar = site->dependent_code()->code_at(index);
+ CHECK(site->dependent_code()->object_at(index)->IsWeakCell());
+ Code* function_bar = Code::cast(
+ WeakCell::cast(site->dependent_code()->object_at(index))->value());
Handle<JSFunction> bar_handle =
v8::Utils::OpenHandle(
*v8::Handle<v8::Function>::Cast(
......@@ -4087,7 +4088,8 @@ TEST(EnsureAllocationSiteDependentCodesProcessed) {
// longer referred to by dependent_code().
DependentCode::GroupStartIndexes starts(site->dependent_code());
int index = starts.at(DependentCode::kAllocationSiteTransitionChangedGroup);
- CHECK(!(site->dependent_code()->is_code_at(index)));
+ CHECK(site->dependent_code()->object_at(index)->IsWeakCell() &&
+ WeakCell::cast(site->dependent_code()->object_at(index))->cleared());
}
......
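The updated test asserts that once the optimized code dies, the dependent-code entry is still a WeakCell, but one the GC has cleared. The std::weak_ptr analogue of that assertion (illustrative only):

#include <cassert>
#include <memory>

struct Code {};

int main() {
  std::weak_ptr<Code> cell;
  {
    auto code = std::make_shared<Code>();
    cell = code;              // dependent entry, held weakly
    assert(!cell.expired());  // code alive: cell still points at it
  }                           // code dies (the "GC")
  assert(cell.expired());     // entry survives, but is cleared
}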