Commit a99fe1fd authored by jochen, committed by Commit bot


Revert of Reland "Keep track of array buffers in new space separately" (patchset #2 id:20001 of https://codereview.chromium.org/1177083003/)

Reason for revert:
Still broken

Original issue's description:
> Reland "Keep track of array buffers in new space separately"
>
> Original review https://codereview.chromium.org/1133773002/
>
> BUG=v8:3996
> TBR=hpayer@chromium.org
> LOG=n
>
> Committed: https://crrev.com/89b9a2cfb317e52186f682c91502b22932d52db3
> Cr-Commit-Position: refs/heads/master@{#28987}

TBR=hpayer@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:3996

Review URL: https://codereview.chromium.org/1186613007

Cr-Commit-Position: refs/heads/master@{#29009}
parent 028025f0
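
Note: the change being reverted tracked new-space buffers in separate maps; the code restored below keeps all array-buffer backing stores in just two maps on the Heap. live_array_buffers_ holds every registered backing store, and not_yet_discovered_array_buffers_ is re-primed from the live set after each sweep, with marking erasing every buffer it visits; whatever is still in the not-yet-discovered set at the next sweep was never visited and is freed. What follows is a minimal, self-contained C++ sketch of that protocol. ArrayBufferTracker and its use of std::free are illustrative stand-ins for Heap and the embedder-supplied array_buffer_allocator(), not V8's actual API.

#include <cstddef>
#include <cstdlib>
#include <map>

// Hypothetical stand-in for the Heap-side bookkeeping restored by this revert.
class ArrayBufferTracker {
 public:
  // A new ArrayBuffer was created with |data| as backing store.
  void RegisterNew(void* data, std::size_t length) {
    if (!data) return;
    live_[data] = length;
  }

  // |data| is no longer owned by the heap (e.g. the buffer was externalized).
  void Unregister(void* data) {
    if (!data) return;
    live_.erase(data);
    not_yet_discovered_.erase(data);
  }

  // Marking visited a live JSArrayBuffer whose backing store is |data|.
  void RegisterLive(void* data) { not_yet_discovered_.erase(data); }

  // Sweep: anything not rediscovered since the previous cycle is dead.
  void FreeDead() {
    for (const auto& buffer : not_yet_discovered_) {
      std::free(buffer.first);   // V8 frees through the embedder's allocator.
      live_.erase(buffer.first);
    }
    not_yet_discovered_ = live_;  // Prime the worklist for the next cycle.
  }

 private:
  std::map<void*, std::size_t> live_;                // backing store -> length
  std::map<void*, std::size_t> not_yet_discovered_;  // pending liveness proof
};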
@@ -6560,8 +6560,7 @@ v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
   Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
                   "ArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
-                                         self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
   return GetContents();
 }
@@ -6768,8 +6767,7 @@ v8::SharedArrayBuffer::Contents v8::SharedArrayBuffer::Externalize() {
   Utils::ApiCheck(!self->is_external(), "v8::SharedArrayBuffer::Externalize",
                   "SharedArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(isolate->heap()->InNewSpace(*self),
-                                         self->backing_store());
+  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
   return GetContents();
 }
......
@@ -1623,8 +1623,6 @@ void Heap::Scavenge() {
   SelectScavengingVisitorsTable();
 
-  PrepareArrayBufferDiscoveryInNewSpace();
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_.Flip();
@@ -1706,8 +1704,6 @@ void Heap::Scavenge() {
   new_space_.LowerInlineAllocationLimit(
       new_space_.inline_allocation_limit_step());
 
-  FreeDeadArrayBuffers(true);
-
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
       (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
@@ -1801,122 +1797,46 @@ void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
 }
 
-void Heap::RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                        void* data, size_t length) {
-  live_buffers[data] = length;
-}
-
-void Heap::UnregisterArrayBufferHelper(
-    std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  DCHECK(live_buffers.count(data) > 0);
-  live_buffers.erase(data);
-  not_yet_discovered_buffers.erase(data);
-}
-
-void Heap::RegisterLiveArrayBufferHelper(
-    std::map<void*, size_t>& not_yet_discovered_buffers, void* data) {
-  not_yet_discovered_buffers.erase(data);
-}
-
-size_t Heap::FreeDeadArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  size_t freed_memory = 0;
-  for (auto buffer = not_yet_discovered_buffers.begin();
-       buffer != not_yet_discovered_buffers.end(); ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    freed_memory += buffer->second;
-    live_buffers.erase(buffer->first);
-  }
-  not_yet_discovered_buffers = live_buffers;
-  return freed_memory;
-}
-
-void Heap::TearDownArrayBuffersHelper(
-    Isolate* isolate, std::map<void*, size_t>& live_buffers,
-    std::map<void*, size_t>& not_yet_discovered_buffers) {
-  for (auto buffer = live_buffers.begin(); buffer != live_buffers.end();
-       ++buffer) {
-    isolate->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_buffers.clear();
-  not_yet_discovered_buffers.clear();
-}
-
-void Heap::RegisterNewArrayBuffer(bool in_new_space, void* data,
-                                  size_t length) {
+void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
   if (!data) return;
-  RegisterNewArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_, data,
-      length);
+  live_array_buffers_[data] = length;
   reinterpret_cast<v8::Isolate*>(isolate_)
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
 
-void Heap::UnregisterArrayBuffer(bool in_new_space, void* data) {
+void Heap::UnregisterArrayBuffer(void* data) {
   if (!data) return;
-  UnregisterArrayBufferHelper(
-      in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_,
-      data);
+  DCHECK(live_array_buffers_.count(data) > 0);
+  live_array_buffers_.erase(data);
+  not_yet_discovered_array_buffers_.erase(data);
 }
 
-void Heap::RegisterLiveArrayBuffer(bool in_new_space, void* data) {
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  RegisterLiveArrayBufferHelper(in_new_space
-                                    ? not_yet_discovered_new_array_buffers_
-                                    : not_yet_discovered_array_buffers_,
-                                data);
+void Heap::RegisterLiveArrayBuffer(void* data) {
+  not_yet_discovered_array_buffers_.erase(data);
 }
 
-void Heap::FreeDeadArrayBuffers(bool in_new_space) {
-  size_t freed_memory = FreeDeadArrayBuffersHelper(
-      isolate_, in_new_space ? live_new_array_buffers_ : live_array_buffers_,
-      in_new_space ? not_yet_discovered_new_array_buffers_
-                   : not_yet_discovered_array_buffers_);
-  if (freed_memory) {
-    reinterpret_cast<v8::Isolate*>(isolate_)
-        ->AdjustAmountOfExternalAllocatedMemory(
-            -static_cast<int64_t>(freed_memory));
+void Heap::FreeDeadArrayBuffers() {
+  for (auto buffer = not_yet_discovered_array_buffers_.begin();
+       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+    // Don't use the API method here since this could trigger another GC.
+    amount_of_external_allocated_memory_ -= buffer->second;
+    live_array_buffers_.erase(buffer->first);
   }
+  not_yet_discovered_array_buffers_ = live_array_buffers_;
 }
 
 void Heap::TearDownArrayBuffers() {
-  TearDownArrayBuffersHelper(isolate_, live_array_buffers_,
-                             not_yet_discovered_array_buffers_);
-  TearDownArrayBuffersHelper(isolate_, live_new_array_buffers_,
-                             not_yet_discovered_new_array_buffers_);
-}
-
-void Heap::PrepareArrayBufferDiscoveryInNewSpace() {
-  not_yet_discovered_new_array_buffers_ = live_new_array_buffers_;
-}
-
-void Heap::PromoteArrayBuffer(Object* obj) {
-  JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
-  if (buffer->is_external()) return;
-  void* data = buffer->backing_store();
-  if (!data) return;
-  // ArrayBuffer might be in the middle of being constructed.
-  if (data == undefined_value()) return;
-  DCHECK(live_new_array_buffers_.count(data) > 0);
-  live_array_buffers_[data] = live_new_array_buffers_[data];
-  live_new_array_buffers_.erase(data);
-  not_yet_discovered_new_array_buffers_.erase(data);
+  for (auto buffer = live_array_buffers_.begin();
+       buffer != live_array_buffers_.end(); ++buffer) {
+    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
+  }
+  live_array_buffers_.clear();
+  not_yet_discovered_array_buffers_.clear();
 }
@@ -2169,7 +2089,6 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
     table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
     table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);
-    table_.Register(kVisitJSArrayBuffer, &EvacuateJSArrayBuffer);
 
     table_.Register(
         kVisitNativeContext,
@@ -2199,6 +2118,9 @@ class ScavengingVisitor : public StaticVisitorBase {
     table_.Register(kVisitJSWeakCollection,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
 
+    table_.Register(kVisitJSArrayBuffer,
+                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
+
     table_.Register(kVisitJSTypedArray,
                     &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);
@@ -2426,18 +2348,6 @@ class ScavengingVisitor : public StaticVisitorBase {
   }
 
-  static inline void EvacuateJSArrayBuffer(Map* map, HeapObject** slot,
-                                           HeapObject* object) {
-    ObjectEvacuationStrategy<POINTER_OBJECT>::Visit(map, slot, object);
-
-    Heap* heap = map->GetHeap();
-    MapWord map_word = object->map_word();
-    DCHECK(map_word.IsForwardingAddress());
-    HeapObject* target = map_word.ToForwardingAddress();
-    if (!heap->InNewSpace(target)) heap->PromoteArrayBuffer(target);
-  }
-
   static inline void EvacuateByteArray(Map* map, HeapObject** slot,
                                        HeapObject* object) {
     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
......
@@ -1567,28 +1567,10 @@ class Heap {
   bool deserialization_complete() const { return deserialization_complete_; }
 
   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.
 
-  // A new ArrayBuffer was created with |data| as backing store.
-  void RegisterNewArrayBuffer(bool in_new_space, void* data, size_t length);
-
-  // The backing store |data| is no longer owned by V8.
-  void UnregisterArrayBuffer(bool in_new_space, void* data);
-
-  // A live ArrayBuffer was discovered during marking/scavenge.
-  void RegisterLiveArrayBuffer(bool in_new_space, void* data);
-
-  // Frees all backing store pointers that weren't discovered in the previous
-  // marking or scavenge phase.
-  void FreeDeadArrayBuffers(bool in_new_space);
-
-  // Prepare for a new scavenge phase. A new marking phase is implicitly
-  // prepared by finishing the previous one.
-  void PrepareArrayBufferDiscoveryInNewSpace();
-
-  // An ArrayBuffer moved from new space to old space.
-  void PromoteArrayBuffer(Object* buffer);
+  void RegisterNewArrayBuffer(void* data, size_t length);
+  void UnregisterArrayBuffer(void* data);
+  void RegisterLiveArrayBuffer(void* data);
+  void FreeDeadArrayBuffers();
 
 protected:
   // Methods made available to tests.
@@ -2092,24 +2074,9 @@ class Heap {
   // the old space.
   void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
 
-  // Called on heap tear-down. Frees all remaining ArrayBuffer backing stores.
+  // Called on heap tear-down.
   void TearDownArrayBuffers();
 
-  // These correspond to the non-Helper versions.
-  void RegisterNewArrayBufferHelper(std::map<void*, size_t>& live_buffers,
-                                    void* data, size_t length);
-  void UnregisterArrayBufferHelper(
-      std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  void RegisterLiveArrayBufferHelper(
-      std::map<void*, size_t>& not_yet_discovered_buffers, void* data);
-  size_t FreeDeadArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-  void TearDownArrayBuffersHelper(
-      Isolate* isolate, std::map<void*, size_t>& live_buffers,
-      std::map<void*, size_t>& not_yet_discovered_buffers);
-
   // Record statistics before and after garbage collection.
   void ReportStatisticsBeforeGC();
   void ReportStatisticsAfterGC();
@@ -2352,9 +2319,7 @@ class Heap {
   bool concurrent_sweeping_enabled_;
 
   std::map<void*, size_t> live_array_buffers_;
-  std::map<void*, size_t> live_new_array_buffers_;
   std::map<void*, size_t> not_yet_discovered_array_buffers_;
-  std::map<void*, size_t> not_yet_discovered_new_array_buffers_;
 
   struct StrongRootsList;
   StrongRootsList* strong_roots_list_;
......
@@ -3041,10 +3041,6 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
   AllocationResult allocation = old_space->AllocateRaw(object_size, alignment);
   if (allocation.To(&target)) {
     MigrateObject(target, object, object_size, old_space->identity());
-    // If we end up needing more special cases, we should factor this out.
-    if (V8_UNLIKELY(target->IsJSArrayBuffer())) {
-      heap()->PromoteArrayBuffer(target);
-    }
     heap()->IncrementPromotedObjectsSize(object_size);
     return true;
   }
@@ -4371,6 +4367,7 @@ void MarkCompactCollector::SweepSpaces() {
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
+  heap()->FreeDeadArrayBuffers();
 
   MoveEvacuationCandidatesToEndOfPagesList();
@@ -4398,8 +4395,6 @@ void MarkCompactCollector::SweepSpaces() {
   EvacuateNewSpaceAndCandidates();
 
-  heap()->FreeDeadArrayBuffers(false);
-
   // ClearNonLiveReferences depends on precise sweeping of map space to
   // detect whether unmarked map became dead in this collection or in one
   // of the previous ones.
......
@@ -85,10 +85,6 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
       heap,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
-  if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(true,
-                                  JSArrayBuffer::cast(object)->backing_store());
-  }
   return JSArrayBuffer::kSizeWithInternalFields;
 }
@@ -508,8 +504,7 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(heap->InNewSpace(object),
-                                  JSArrayBuffer::cast(object)->backing_store());
+    heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
   }
 }
......
@@ -16470,8 +16470,7 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
   void* backing_store =
       isolate->array_buffer_allocator()->AllocateUninitialized(
           fixed_typed_array->DataSize());
-  isolate->heap()->RegisterNewArrayBuffer(isolate->heap()->InNewSpace(*buffer),
-                                          backing_store,
+  isolate->heap()->RegisterNewArrayBuffer(backing_store,
                                           fixed_typed_array->DataSize());
   buffer->set_backing_store(backing_store);
   buffer->set_is_external(false);
......
@@ -34,8 +34,7 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
   array_buffer->set_byte_length(*byte_length);
 
   if (data && !is_external) {
-    isolate->heap()->RegisterNewArrayBuffer(
-        isolate->heap()->InNewSpace(*array_buffer), data, allocated_length);
+    isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
   }
 }
@@ -151,8 +150,7 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
   size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
   array_buffer->set_is_external(true);
   Runtime::NeuterArrayBuffer(array_buffer);
-  isolate->heap()->UnregisterArrayBuffer(
-      isolate->heap()->InNewSpace(*array_buffer), backing_store);
+  isolate->heap()->UnregisterArrayBuffer(backing_store);
   isolate->array_buffer_allocator()->Free(backing_store, byte_length);
   return isolate->heap()->undefined_value();
 }
......
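
To see the discovery protocol end to end, here is a hedged usage sketch driving the hypothetical ArrayBufferTracker from the note above through two collection cycles; it assumes that sketch is in scope and that backing stores come from std::malloc so std::free matches.

#include <cstdlib>

int main() {
  ArrayBufferTracker tracker;   // hypothetical class from the sketch above
  void* a = std::malloc(16);
  void* b = std::malloc(32);
  tracker.RegisterNew(a, 16);   // two buffers allocated by the mutator
  tracker.RegisterNew(b, 32);

  tracker.FreeDead();           // cycle 1: the worklist was empty, so nothing
                                // is freed; it is now primed with {a, b}
  tracker.RegisterLive(a);      // cycle 2 marking only rediscovers |a|
  tracker.FreeDead();           // |b| was never rediscovered, so it is freed

  tracker.Unregister(a);        // |a| is handed back to the embedder...
  std::free(a);                 // ...which now owns and frees it
  return 0;
}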