Commit 3e25666c authored by machenbach, committed by Commit bot

Revert of Remove the weak list of array buffers (patchset #8 id:140001 of https://codereview.chromium.org/1114563002/)

Reason for revert:
[Sheriff] Crashes in layout tests:
https://chromegw.corp.google.com/i/client.v8/builders/V8-Blink%20Linux%2064%20%28dbg%29/builds/2668

Original issue's description:
> Remove the weak list of array buffers
>
> Instead, collect live array buffers during marking and free pointers we
> no longer found.
>
> BUG=v8:3996
> R=hpayer@chromium.org
> LOG=n
>
> Committed: https://crrev.com/2d39709cf5ee17637f6f2d75380a9e61ae0b342b
> Cr-Commit-Position: refs/heads/master@{#28132}

TBR=dslomov@chromium.org,hpayer@chromium.org,jochen@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:3996

Review URL: https://codereview.chromium.org/1115043005

Cr-Commit-Position: refs/heads/master@{#28148}
parent cf420ec3
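
For context on the two schemes involved: the reverted CL dropped the weak list threaded through JSArrayBuffer objects and instead tracked backing stores in two heap-side maps, recording every store at allocation, ticking it off when the marker visited its buffer, and freeing whatever remained unvisited after marking. A minimal standalone sketch of that map-based scheme (illustrative only; the ArrayBufferTracker and BackingStoreAllocator names here are assumptions, not V8 code):

#include <cstddef>
#include <map>

// Stand-in for v8::ArrayBuffer::Allocator (assumed interface).
struct BackingStoreAllocator {
  virtual void Free(void* data, size_t length) = 0;
  virtual ~BackingStoreAllocator() {}
};

class ArrayBufferTracker {
 public:
  explicit ArrayBufferTracker(BackingStoreAllocator* allocator)
      : allocator_(allocator) {}

  // Record a freshly allocated backing store.
  void RegisterNew(void* data, size_t length) { live_[data] = length; }

  // The buffer was externalized or explicitly freed; stop tracking it.
  void Unregister(void* data) {
    live_.erase(data);
    not_yet_discovered_.erase(data);
  }

  // The marker reached a live JSArrayBuffer owning this store.
  void RegisterLive(void* data) { not_yet_discovered_.erase(data); }

  // After marking: every store the marker did not reach is garbage.
  void FreeDead() {
    for (const auto& buffer : not_yet_discovered_) {
      allocator_->Free(buffer.first, buffer.second);
      live_.erase(buffer.first);
    }
    // Everything still live must be rediscovered during the next GC cycle.
    not_yet_discovered_ = live_;
  }

 private:
  BackingStoreAllocator* allocator_;
  std::map<void*, size_t> live_;
  std::map<void*, size_t> not_yet_discovered_;
};

The revert below restores the weak-list approach: array buffers are threaded through a weak_next field, the list is walked after GC to free dead buffers, and the map bookkeeping is removed again.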
@@ -6254,12 +6254,9 @@ bool v8::ArrayBuffer::IsNeuterable() const {
 v8::ArrayBuffer::Contents v8::ArrayBuffer::Externalize() {
   i::Handle<i::JSArrayBuffer> self = Utils::OpenHandle(this);
-  i::Isolate* isolate = self->GetIsolate();
   Utils::ApiCheck(!self->is_external(), "v8::ArrayBuffer::Externalize",
                   "ArrayBuffer already externalized");
   self->set_is_external(true);
-  isolate->heap()->UnregisterArrayBuffer(self->backing_store());
   return GetContents();
 }
...
@@ -1566,6 +1566,8 @@ class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 void V8HeapExplorer::ExtractJSArrayBufferReferences(
     int entry, JSArrayBuffer* buffer) {
+  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
+                   JSArrayBuffer::kWeakNextOffset);
   // Setup a reference to a native memory backing_store object.
   if (!buffer->backing_store())
     return;
...
@@ -154,6 +154,8 @@ Heap::Heap()
   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
   set_native_contexts_list(NULL);
+  set_array_buffers_list(Smi::FromInt(0));
+  set_last_array_buffer_in_list(Smi::FromInt(0));
   set_allocation_sites_list(Smi::FromInt(0));
   set_encountered_weak_collections(Smi::FromInt(0));
   set_encountered_weak_cells(Smi::FromInt(0));
@@ -1705,67 +1707,64 @@ void Heap::UpdateReferencesInExternalStringTable(
 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
+  ProcessArrayBuffers(retainer, false);
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
 }


 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
+  ProcessArrayBuffers(retainer, true);
   ProcessNativeContexts(retainer);
 }


 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
-  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
+  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer,
+                                        false, NULL);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }


-void Heap::RegisterNewArrayBuffer(void* data, size_t length) {
-  live_array_buffers_[data] = length;
-  reinterpret_cast<v8::Isolate*>(isolate_)
-      ->AdjustAmountOfExternalAllocatedMemory(length);
-}
-
-
-void Heap::UnregisterArrayBuffer(void* data) {
-  DCHECK(live_array_buffers_.count(data) > 0);
-  live_array_buffers_.erase(data);
-  not_yet_discovered_array_buffers_.erase(data);
-}
-
-
-void Heap::RegisterLiveArrayBuffer(void* data) {
-  not_yet_discovered_array_buffers_.erase(data);
-}
-
-
-void Heap::FreeDeadArrayBuffers() {
-  for (auto buffer = not_yet_discovered_array_buffers_.begin();
-       buffer != not_yet_discovered_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-    // Don't use the API method here since this could trigger another GC.
-    amount_of_external_allocated_memory_ -= buffer->second;
-    live_array_buffers_.erase(buffer->first);
-  }
-  not_yet_discovered_array_buffers_ = live_array_buffers_;
-}
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
+                               bool stop_after_young) {
+  Object* last_array_buffer = undefined_value();
+  Object* array_buffer_obj =
+      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer,
+                                   stop_after_young, &last_array_buffer);
+  set_array_buffers_list(array_buffer_obj);
+  set_last_array_buffer_in_list(last_array_buffer);
+
+  // Verify invariant that young array buffers come before old array buffers
+  // in array buffers list if there was no promotion failure.
+  Object* undefined = undefined_value();
+  Object* next = array_buffers_list();
+  bool old_objects_recorded = false;
+  while (next != undefined) {
+    if (!old_objects_recorded) {
+      old_objects_recorded = !InNewSpace(next);
+    }
+    CHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
+    next = JSArrayBuffer::cast(next)->weak_next();
+  }
+}


 void Heap::TearDownArrayBuffers() {
-  for (auto buffer = live_array_buffers_.begin();
-       buffer != live_array_buffers_.end(); ++buffer) {
-    isolate_->array_buffer_allocator()->Free(buffer->first, buffer->second);
-  }
-  live_array_buffers_.clear();
-  not_yet_discovered_array_buffers_.clear();
+  Object* undefined = undefined_value();
+  for (Object* o = array_buffers_list(); o != undefined;) {
+    JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
+    Runtime::FreeArrayBuffer(isolate(), buffer);
+    o = buffer->weak_next();
+  }
+  set_array_buffers_list(undefined);
 }


 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
-  Object* allocation_site_obj =
-      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
+  Object* allocation_site_obj = VisitWeakList<AllocationSite>(
+      this, allocation_sites_list(), retainer, false, NULL);
   set_allocation_sites_list(allocation_site_obj);
 }
@@ -5411,6 +5410,8 @@ bool Heap::CreateHeapObjects() {
   CHECK_EQ(0u, gc_count_);

   set_native_contexts_list(undefined_value());
+  set_array_buffers_list(undefined_value());
+  set_last_array_buffer_in_list(undefined_value());
   set_allocation_sites_list(undefined_value());
   return true;
 }
...
@@ -6,7 +6,6 @@
 #define V8_HEAP_HEAP_H_

 #include <cmath>
-#include <map>

 #include "src/allocation.h"
 #include "src/assert-scope.h"
@@ -875,6 +874,16 @@ class Heap {
   }
   Object* native_contexts_list() const { return native_contexts_list_; }

+  void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
+  Object* array_buffers_list() const { return array_buffers_list_; }
+
+  void set_last_array_buffer_in_list(Object* object) {
+    last_array_buffer_in_list_ = object;
+  }
+  Object* last_array_buffer_in_list() const {
+    return last_array_buffer_in_list_;
+  }
+
   void set_allocation_sites_list(Object* object) {
     allocation_sites_list_ = object;
   }
@@ -1481,11 +1490,6 @@ class Heap {
   bool deserialization_complete() const { return deserialization_complete_; }

-  void RegisterNewArrayBuffer(void* data, size_t length);
-  void UnregisterArrayBuffer(void* data);
-  void RegisterLiveArrayBuffer(void* data);
-  void FreeDeadArrayBuffers();
-
  protected:
   // Methods made available to tests.
@@ -1656,6 +1660,8 @@ class Heap {
   // Weak list heads, threaded through the objects.
   // List heads are initialized lazily and contain the undefined_value at start.
   Object* native_contexts_list_;
+  Object* array_buffers_list_;
+  Object* last_array_buffer_in_list_;
   Object* allocation_sites_list_;

   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
@@ -1989,6 +1995,7 @@ class Heap {
   void MarkCompactEpilogue();

   void ProcessNativeContexts(WeakObjectRetainer* retainer);
+  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
   void ProcessAllocationSites(WeakObjectRetainer* retainer);

   // Deopts all code that contains allocation instruction which are tenured or
@@ -2149,9 +2156,6 @@ class Heap {
   bool concurrent_sweeping_enabled_;

-  std::map<void*, size_t> live_array_buffers_;
-  std::map<void*, size_t> not_yet_discovered_array_buffers_;
-
   friend class AlwaysAllocateScope;
   friend class Deserializer;
   friend class Factory;
...
@@ -4386,8 +4386,6 @@ void MarkCompactCollector::SweepSpaces() {
 #ifdef DEBUG
   state_ = SWEEP_SPACES;
 #endif
-  heap()->FreeDeadArrayBuffers();
-
   MoveEvacuationCandidatesToEndOfPagesList();

   // Noncompacting collections simply sweep the spaces to clear the mark
...
@@ -80,9 +80,12 @@ int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
     Map* map, HeapObject* object) {
   Heap* heap = map->GetHeap();

+  VisitPointers(heap, HeapObject::RawField(
+                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
+                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
   VisitPointers(
-      heap,
-      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
+      heap, HeapObject::RawField(object,
+                                 JSArrayBuffer::kWeakNextOffset + kPointerSize),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
   return JSArrayBuffer::kSizeWithInternalFields;
 }
@@ -530,10 +533,11 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
   StaticVisitor::VisitPointers(
       heap,
       HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
+      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
+  StaticVisitor::VisitPointers(
+      heap, HeapObject::RawField(object,
+                                 JSArrayBuffer::kWeakNextOffset + kPointerSize),
       HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
-  if (!JSArrayBuffer::cast(object)->is_external()) {
-    heap->RegisterLiveArrayBuffer(JSArrayBuffer::cast(object)->backing_store());
-  }
 }
...
@@ -191,7 +191,8 @@ struct WeakListVisitor;

 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young, Object** list_tail) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   T* tail = NULL;
@@ -234,7 +235,10 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
   }

   // Terminate the list if there is one or more elements.
-  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
+  if (tail != NULL) {
+    WeakListVisitor<T>::SetWeakNext(tail, undefined);
+    if (list_tail) *list_tail = tail;
+  }

   return head;
 }
@@ -316,7 +320,8 @@ struct WeakListVisitor<Context> {
   static void DoWeakList(Heap* heap, Context* context,
                          WeakObjectRetainer* retainer, int index) {
     // Visit the weak list, removing dead intermediate elements.
-    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
+    Object* list_head =
+        VisitWeakList<T>(heap, context->get(index), retainer, false, NULL);
     // Update the list head.
     context->set(index, list_head, UPDATE_WRITE_BARRIER);
@@ -339,6 +344,26 @@ struct WeakListVisitor<Context> {
 };


+template <>
+struct WeakListVisitor<JSArrayBuffer> {
+  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
+    obj->set_weak_next(next);
+  }
+
+  static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }
+
+  static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }
+
+  static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
+                              WeakObjectRetainer* retainer) {
+  }
+
+  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
+    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
+  }
+};
+
+
 template <>
 struct WeakListVisitor<AllocationSite> {
   static void SetWeakNext(AllocationSite* obj, Object* next) {
@@ -356,9 +381,19 @@ struct WeakListVisitor<AllocationSite> {

 template Object* VisitWeakList<Context>(Heap* heap, Object* list,
-                                        WeakObjectRetainer* retainer);
+                                        WeakObjectRetainer* retainer,
+                                        bool stop_after_young,
+                                        Object** list_tail);
+
+template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
+                                              WeakObjectRetainer* retainer,
+                                              bool stop_after_young,
+                                              Object** list_tail);

 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
-                                               WeakObjectRetainer* retainer);
+                                               WeakObjectRetainer* retainer,
+                                               bool stop_after_young,
+                                               Object** list_tail);
 }
 }  // namespace v8::internal
@@ -490,7 +490,10 @@ class WeakObjectRetainer;
 // pointers. The template parameter T is a WeakListVisitor that defines how to
 // access the next-element pointers.
 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young, Object** list_tail);
+
+Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
+                                         WeakObjectRetainer* retainer);
 }
 }  // namespace v8::internal
...
@@ -6470,6 +6470,9 @@ void JSArrayBuffer::set_was_neutered(bool value) {
 }


+ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
+
+
 Object* JSArrayBufferView::byte_offset() const {
   if (WasNeutered()) return Smi::FromInt(0);
   return Object::cast(READ_FIELD(this, kByteOffsetOffset));
...
@@ -10258,6 +10258,9 @@ class JSArrayBuffer: public JSObject {
   inline bool was_neutered();
   inline void set_was_neutered(bool value);

+  // [weak_next]: linked list of array buffers.
+  DECL_ACCESSORS(weak_next, Object)
+
   DECLARE_CAST(JSArrayBuffer)

   void Neuter();
@@ -10274,7 +10277,8 @@ class JSArrayBuffer: public JSObject {
 #else
   static const int kBitFieldOffset = kBitFieldSlot + kIntSize;
 #endif
-  static const int kSize = kBitFieldSlot + kPointerSize;
+  static const int kWeakNextOffset = kBitFieldSlot + kPointerSize;
+  static const int kSize = kWeakNextOffset + kPointerSize;

   static const int kSizeWithInternalFields =
       kSize + v8::ArrayBuffer::kInternalFieldCount * kPointerSize;
...
@@ -12,6 +12,22 @@
 namespace v8 {
 namespace internal {

+void Runtime::FreeArrayBuffer(Isolate* isolate,
+                              JSArrayBuffer* phantom_array_buffer) {
+  if (phantom_array_buffer->is_external()) return;
+
+  size_t allocated_length =
+      NumberToSize(isolate, phantom_array_buffer->byte_length());
+
+  reinterpret_cast<v8::Isolate*>(isolate)
+      ->AdjustAmountOfExternalAllocatedMemory(
+          -static_cast<int64_t>(allocated_length));
+  CHECK(isolate->array_buffer_allocator() != NULL);
+  isolate->array_buffer_allocator()->Free(phantom_array_buffer->backing_store(),
+                                          allocated_length);
+}
+
+
 void Runtime::SetupArrayBuffer(Isolate* isolate,
                                Handle<JSArrayBuffer> array_buffer,
                                bool is_external, void* data,
@@ -31,8 +47,17 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
   CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
   array_buffer->set_byte_length(*byte_length);

-  if (data && !is_external) {
-    isolate->heap()->RegisterNewArrayBuffer(data, allocated_length);
+  if (isolate->heap()->InNewSpace(*array_buffer) ||
+      isolate->heap()->array_buffers_list()->IsUndefined()) {
+    array_buffer->set_weak_next(isolate->heap()->array_buffers_list());
+    isolate->heap()->set_array_buffers_list(*array_buffer);
+    if (isolate->heap()->last_array_buffer_in_list()->IsUndefined()) {
+      isolate->heap()->set_last_array_buffer_in_list(*array_buffer);
+    }
+  } else {
+    JSArrayBuffer::cast(isolate->heap()->last_array_buffer_in_list())
+        ->set_weak_next(*array_buffer);
+    isolate->heap()->set_last_array_buffer_in_list(*array_buffer);
   }
 }
@@ -58,6 +83,10 @@ bool Runtime::SetupArrayBufferAllocatingData(Isolate* isolate,
   }

   SetupArrayBuffer(isolate, array_buffer, false, data, allocated_length);
+
+  reinterpret_cast<v8::Isolate*>(isolate)
+      ->AdjustAmountOfExternalAllocatedMemory(allocated_length);
+
   return true;
 }
@@ -144,7 +173,6 @@ RUNTIME_FUNCTION(Runtime_ArrayBufferNeuter) {
   size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
   array_buffer->set_is_external(true);
   Runtime::NeuterArrayBuffer(array_buffer);
-  isolate->heap()->UnregisterArrayBuffer(backing_store);
   isolate->array_buffer_allocator()->Free(backing_store, byte_length);
   return isolate->heap()->undefined_value();
 }
...
@@ -817,6 +817,9 @@ class Runtime : public AllStatic {

   static void NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer);

+  static void FreeArrayBuffer(Isolate* isolate,
+                              JSArrayBuffer* phantom_array_buffer);
+
   static int FindIndexedNonNativeFrame(JavaScriptFrameIterator* it, int index);

   enum TypedArrayId {
...
@@ -563,6 +563,10 @@ void Deserializer::Deserialize(Isolate* isolate) {
     isolate_->heap()->set_native_contexts_list(
         isolate_->heap()->undefined_value());
+    isolate_->heap()->set_array_buffers_list(
+        isolate_->heap()->undefined_value());
+    isolate_->heap()->set_last_array_buffer_in_list(
+        isolate_->heap()->undefined_value());

     // The allocation site list is build during root iteration, but if no sites
     // were encountered then it needs to be initialized to undefined.
...
@@ -164,6 +164,7 @@
         'test-version.cc',
         'test-weakmaps.cc',
         'test-weaksets.cc',
+        'test-weaktypedarrays.cc',
         'trace-extension.cc',
         '../../src/startup-data-util.h',
         '../../src/startup-data-util.cc'
...
// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <stdlib.h>
#include "src/v8.h"
#include "test/cctest/cctest.h"
#include "src/api.h"
#include "src/heap/heap.h"
#include "src/objects.h"
using namespace v8::internal;
static Isolate* GetIsolateFrom(LocalContext* context) {
  return reinterpret_cast<Isolate*>((*context)->GetIsolate());
}


static int CountArrayBuffersInWeakList(Heap* heap) {
  int count = 0;
  for (Object* o = heap->array_buffers_list();
       !o->IsUndefined();
       o = JSArrayBuffer::cast(o)->weak_next()) {
    count++;
  }
  return count;
}


static bool HasArrayBufferInWeakList(Heap* heap, JSArrayBuffer* ab) {
  for (Object* o = heap->array_buffers_list();
       !o->IsUndefined();
       o = JSArrayBuffer::cast(o)->weak_next()) {
    if (ab == o) return true;
  }
  return false;
}


TEST(WeakArrayBuffersFromScript) {
  v8::V8::Initialize();
  LocalContext context;
  Isolate* isolate = GetIsolateFrom(&context);
  int start = CountArrayBuffersInWeakList(isolate->heap());

  for (int i = 1; i <= 3; i++) {
    // Create 3 array buffers, make i-th of them garbage,
    // validate correct state of array buffer weak list.
    CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
    {
      v8::HandleScope scope(context->GetIsolate());

      {
        v8::HandleScope s1(context->GetIsolate());
        CompileRun("var ab1 = new ArrayBuffer(256);"
                   "var ab2 = new ArrayBuffer(256);"
                   "var ab3 = new ArrayBuffer(256);");
        v8::Handle<v8::ArrayBuffer> ab1 =
            v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab1"));
        v8::Handle<v8::ArrayBuffer> ab2 =
            v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab2"));
        v8::Handle<v8::ArrayBuffer> ab3 =
            v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab3"));

        CHECK_EQ(3, CountArrayBuffersInWeakList(isolate->heap()) - start);
        CHECK(HasArrayBufferInWeakList(isolate->heap(),
                                       *v8::Utils::OpenHandle(*ab1)));
        CHECK(HasArrayBufferInWeakList(isolate->heap(),
                                       *v8::Utils::OpenHandle(*ab2)));
        CHECK(HasArrayBufferInWeakList(isolate->heap(),
                                       *v8::Utils::OpenHandle(*ab3)));
      }

      i::ScopedVector<char> source(1024);
      i::SNPrintF(source, "ab%d = null;", i);
      CompileRun(source.start());
      isolate->heap()->CollectAllGarbage();

      CHECK_EQ(2, CountArrayBuffersInWeakList(isolate->heap()) - start);

      {
        v8::HandleScope s2(context->GetIsolate());
        for (int j = 1; j <= 3; j++) {
          if (j == i) continue;
          i::SNPrintF(source, "ab%d", j);
          v8::Handle<v8::ArrayBuffer> ab =
              v8::Handle<v8::ArrayBuffer>::Cast(CompileRun(source.start()));
          CHECK(HasArrayBufferInWeakList(isolate->heap(),
                                         *v8::Utils::OpenHandle(*ab)));
        }
      }

      CompileRun("ab1 = null; ab2 = null; ab3 = null;");
    }

    isolate->heap()->CollectAllGarbage();

    CHECK_EQ(start, CountArrayBuffersInWeakList(isolate->heap()));
  }
}