Commit a99fe1fd authored by jochen, committed by Commit bot

Revert of Reland "Keep track of array buffers in new space separately" (patchset #2 id:20001 of https://codereview.chromium.org/1177083003/)

Reason for revert:
Still broken

Original issue's description:
> Reland "Keep track of array buffers in new space separately"
>
> Original review https://codereview.chromium.org/1133773002/
>
> BUG=v8:3996
> TBR=hpayer@chromium.org
> LOG=n
>
> Committed: https://crrev.com/89b9a2cfb317e52186f682c91502b22932d52db3
> Cr-Commit-Position: refs/heads/master@{#28987}

TBR=hpayer@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:3996

Review URL: https://codereview.chromium.org/1186613007

Cr-Commit-Position: refs/heads/master@{#29009}
parent 028025f0
@@ -6560,8 +6560,7 @@ v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
   Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
                   "ArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
-                                         self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
   return GetContents();
 }
@@ -6768,8 +6767,7 @@ v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::Externalize() {
   Utils::ApiCheck(!self->is_external(), "v8::SharedArrayBuffer::Externalize",
                   "SharedArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
-                                         self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
   return GetContents();
 }
...
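Note on the two hunks above: Externalize() hands the backing store over to the embedder, so the heap must stop accounting for it. A minimal standalone sketch of that hand-off, with a hypothetical Tracker struct and malloc/free standing in for Heap's bookkeeping and the array buffer allocator (not V8 API):

    // Sketch only; Tracker is a made-up stand-in for Heap's bookkeeping.
    #include <cassert>
    #include <cstdlib>
    #include <map>

    struct Tracker {
      std::map<void*, std::size_t> live;  // backing store -> byte length
      void Unregister(void* data) {
        if (!data) return;
        assert(live.count(data) > 0);
        live.erase(data);  // the heap no longer owns or accounts for |data|
      }
    };

    int main() {
      Tracker heap_tracker;
      void* store = std::malloc(64);
      heap_tracker.live[store] = 64;   // registered when the buffer was set up
      heap_tracker.Unregister(store);  // Externalize(): embedder takes ownership
      std::free(store);                // ...and must free it itself later
      return 0;
    }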
@@ -1623,8 +1623,6 @@ void Heap::Scavenge() {
   SelectScavengingVisitorsTable();
 
-  PrepareArrayBufferDiscoveryInNewSpace();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1706,8 +1704,6 @@ void Heap::Scavenge() {
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());
 
-  FreeDeadArrayBuffers(true);
-
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
@@ -1801,122 +1797,46 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
 
-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
-
-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
-                                  size_t length) {
+void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
-      length);
+  live_array_buffers_[data] = length;
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
 
 
-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
+void Heap::UnregisterArrayBuffer(void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_,
-      data);
+  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_.erase(data);
+  not_yet_discovered_array_buffers_.erase(data);
 }
 
 
-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  RegisterLiveArrayBufferHelper(in_new_space
-                                    ? not_yet_discovered_new_array_buffers_
-                                    : not_yet_discovered_array_buffers_,
-                                data);
+void Heap::RegisterLiveArrayBuffer(void* data) {
+  not_yet_discovered_array_buffers_.erase(data);
 }
 
 
-void Heap::FreeDeadArrayBuffers(bool in_new_space) {
-  size_t freed_memory = FreeDeadArrayBuffersHelper(
-      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_);
-  if (freed_memory) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
+void Heap::FreeDeadArrayBuffers() {
+  for (auto buffer = not_yet_discovered_array_buffers_.begin();
+       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    // Don't use the API method here since this could trigger another GC.
+    amount_of_external_allocated_memory_ -= buffer->second;
+    live_array_buffers_.erase(buffer->first);
   }
+  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }
 
 
 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
-  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
-                             not_yet_discovered_new_array_buffers_);
-}
-
-
-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
-  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
-}
-
-
-void Heap::PromoteArrayBuffer(Object* obj) {
-  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  DCHECK(live_new_array_buffers_.count(data) > 0);
-  live_array_buffers_[data] = live_new_array_buffers_[data];
-  live_new_array_buffers_.erase(data);
-  not_yet_discovered_new_array_buffers_.erase(data);
+  for (auto buffer = live_array_buffers_.begin();
+       buffer != live_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_array_buffers_.clear();
+  not_yet_discovered_array_buffers_.clear();
 }
@@ -2169,7 +2089,6 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
 
     table_.Register(
         kVisitNativeContext,
@@ -2199,6 +2118,9 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
 
+    table_.Register(kVisitJSArrayBuffer,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
+
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
@@ -2426,18 +2348,6 @@ class ScavengingVisitor : public StaticVisitorBase {
   }
 
-  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                           HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-    Heap* heap = map->GetHeap();
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
-  }
-
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
...
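The scavenger hunks above move JSArrayBuffer from a dedicated EvacuateJSArrayBuffer callback back to the generic POINTER_OBJECT evacuation strategy. A toy sketch of this kind of table-based visitor dispatch, using made-up types (V8's StaticVisitorBase machinery is considerably more involved):

    #include <array>
    #include <cstdio>

    enum VisitorId { kVisitJSArrayBuffer, kVisitByteArray, kNumVisitorIds };
    struct HeapObject { VisitorId id; };
    using Callback = void (*)(HeapObject*);

    static void EvacuateAsPointerObject(HeapObject*) {
      std::puts("evacuated via the generic POINTER_OBJECT strategy");
    }

    int main() {
      std::array<Callback, kNumVisitorIds> table{};
      // After this revert, JSArrayBuffer shares the generic strategy; the
      // dedicated EvacuateJSArrayBuffer entry is gone.
      table[kVisitJSArrayBuffer] = &EvacuateAsPointerObject;
      table[kVisitByteArray] = &EvacuateAsPointerObject;
      HeapObject buffer{kVisitJSArrayBuffer};
      table[buffer.id](&buffer);  // one indirect call, keyed by visitor id
      return 0;
    }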
@@ -1567,28 +1567,10 @@ class Heap {
   bool deserialization_complete() const { return deserialization_complete_; }
 
-  // The following methods are used to track raw C++ pointers to externally
-  // allocated memory used as backing store in live array buffers.
-
-  // A new ArrayBuffer was created with |data| as backing store.
-  void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length);
-
-  // The backing store |data| is no longer owned by V8.
-  void UnregisterArrayBuffer(bool in_new_space, void* data);
-
-  // A live ArrayBuffer was discovered during marking/scavenge.
-  void RegisterLiveArrayBuffer(bool in_new_space, void* data);
-
-  // Frees all backing store pointers that weren't discovered in the previous
-  // marking or scavenge phase.
-  void FreeDeadArrayBuffers(bool in_new_space);
-
-  // Prepare for a new scavenge phase. A new marking phase is implicitly
-  // prepared by finishing the previous one.
-  void PrepareArrayBufferDiscoveryInNewSpace();
-
-  // An ArrayBuffer moved from new space to old space.
-  void PromoteArrayBuffer(Object* buffer);
+  void RegisterNewArrayBuffer(void* data, size_t length);
+  void UnregisterArrayBuffer(void* data);
+  void RegisterLiveArrayBuffer(void* data);
+  void FreeDeadArrayBuffers();
 
  protected:
   // Methods made available to tests.
@@ -2092,24 +2074,9 @@ class Heap {
   // the old space.
   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
 
-  // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
+  // Called on heap tear-down.
   void TearDownArrayBuffers();
 
-  // These correspond to the non-Helper versions.
-  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                    void* data, size_t length);
-  void UnregisterArrayBufferHelper(
-      std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  void RegisterLiveArrayBufferHelper(
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  size_t FreeDeadArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-  void TearDownArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
@@ -2352,9 +2319,7 @@ class Heap {
   bool concurrent_sweeping_enabled_;
 
   std::map<void*, size_t> live_array_buffers_;
-  std::map<void*, size_t> live_new_array_buffers_;
   std::map<void*, size_t> not_yet_discovered_array_buffers_;
-  std::map<void*, size_t> not_yet_discovered_new_array_buffers_;
 
   struct StrongRootsList;
   StrongRootsList* strong_roots_list_;
...
@@ -3041,10 +3041,6 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
   AllocationResult allocation = old_space->AllocateRaw(object_size, alignment);
   if (allocation.To(&target)) {
     MigrateObject(target, object, object_size, old_space->identity());
-    // If we end up needing more special cases, we should factor this out.
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap()->PromoteArrayBuffer(target);
-    }
     heap()->IncrementPromotedObjectsSize(object_size);
     return true;
   }
@@ -4371,6 +4367,7 @@ void MarkCompactCollector::SweepSpaces() {
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
+  heap()->FreeDeadArrayBuffers();
 
   MoveEvacuationCandidatesToEndOfPagesList();
@@ -4398,8 +4395,6 @@ void MarkCompactCollector::SweepSpaces() {
   EvacuateNewSpaceAndCandidates();
 
-  heap()->FreeDeadArrayBuffers(false);
-
   // ClearNonLiveReferences depends on precise sweeping of map space to
   // detect whether unmarked map became dead in this collection or in one
   // of the previous ones.
...
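With the separate new-space path gone, dead backing stores are now freed once per full GC, at the top of SweepSpaces(), rather than after evacuation. A stub sketch of that reordering, with stand-in functions rather than the real MarkCompactCollector:

    #include <cstdio>

    // Stand-ins for the collector phases touched in the two hunks above.
    static void FreeDeadArrayBuffers() {
      std::puts("free every store the marker did not rediscover");
    }
    static void MoveEvacuationCandidatesToEndOfPagesList() {}
    static void EvacuateNewSpaceAndCandidates() { std::puts("evacuate"); }

    static void SweepSpaces() {
      FreeDeadArrayBuffers();  // added at the start of sweeping by this change
      MoveEvacuationCandidatesToEndOfPagesList();
      EvacuateNewSpaceAndCandidates();
      // the removed FreeDeadArrayBuffers(false) call used to run here
    }

    int main() {
      SweepSpaces();
      return 0;
    }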
@@ -85,10 +85,6 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
       heap,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
-  if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(true,
-                                  JSArrayBuffer::cast(object)->backing_store());
-  }
   return JSArrayBuffer::kSizeWithInternalFields;
 }
@@ -508,8 +504,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(heap->InNewSpace(object),
-                                  JSArrayBuffer::cast(object)->backing_store());
+    heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
   }
 }
...
@@ -16470,8 +16470,7 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
   void* backing_store =
       isolate->array_buffer_allocator()->AllocateUninitialized(
           fixed_typed_array->DataSize());
-  isolate->heap()->RegisterNewArrayBuffer(isolate->heap()->InNewSpace(*buffer),
-                                          backing_store,
+  isolate->heap()->RegisterNewArrayBuffer(backing_store,
                                           fixed_typed_array->DataSize());
   buffer->set_backing_store(backing_store);
   buffer->set_is_external(false);
...
@@ -34,8 +34,7 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
   array_buffer->set_byte_length(*byte_length);
 
   if (data && !is_external) {
-    isolate->heap()->RegisterNewArrayBuffer(
-        isolate->heap()->InNewSpace(*array_buffer), data, allocated_length);
+    isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
   }
 }
@@ -151,8 +150,7 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
   size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
   array_buffer->set_is_external(true);
   Runtime::NeuterArrayBuffer(array_buffer);
-  isolate->heap()->UnregisterArrayBuffer(
-      isolate->heap()->InNewSpace(*array_buffer), backing_store);
+  isolate->heap()->UnregisterArrayBuffer(backing_store);
   isolate->array_buffer_allocator()->Free(backing_store, byte_length);
   return isolate->heap()->undefined_value();
 }
...
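The runtime hunks pair each RegisterNewArrayBuffer with exactly one UnregisterArrayBuffer before a backing store is freed or handed off. A standalone sketch of the neuter path, where a plain map stands in for the heap's live_array_buffers_ and malloc/free for the array buffer allocator (illustrative stand-ins, not V8 API):

    #include <cassert>
    #include <cstdlib>
    #include <map>

    int main() {
      std::map<void*, std::size_t> live;   // heap-side accounting
      std::size_t byte_length = 32;
      void* backing_store = std::malloc(byte_length);
      live[backing_store] = byte_length;   // Runtime::SetupArrayBuffer path

      // Runtime_ArrayBufferNeuter: stop tracking first, then free the store.
      assert(live.count(backing_store) > 0);
      live.erase(backing_store);           // UnregisterArrayBuffer(backing_store)
      std::free(backing_store);            // allocator->Free(store, byte_length)
      return 0;
    }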