Commit b3282c29 authored by dslomov@chromium.org

Recording array buffer views.

R=hpayer@chromium.org
BUG=

Review URL: https://codereview.chromium.org/15562008

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@15000 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent a0f786f0
......@@ -6173,6 +6173,14 @@ Float64Array* Float64Array::Cast(v8::Value* value) {
}
// Checked downcast from a generic v8::Value* to Uint8ClampedArray*.
// The actual type check (CheckCast) is compiled in only when
// V8_ENABLE_CHECKS is defined; release builds trust the caller.
Uint8ClampedArray* Uint8ClampedArray::Cast(v8::Value* value) {
#ifdef V8_ENABLE_CHECKS
CheckCast(value);
#endif
return static_cast<Uint8ClampedArray*>(value);
}
Function* Function::Cast(v8::Value* value) {
#ifdef V8_ENABLE_CHECKS
CheckCast(value);
......
......@@ -6206,6 +6206,9 @@ i::Handle<i::JSTypedArray> NewTypedArray(
obj->set_buffer(*buffer);
obj->set_weak_next(buffer->weak_first_array());
buffer->set_weak_first_array(*obj);
i::Handle<i::Object> byte_offset_object = isolate->factory()->NewNumber(
static_cast<double>(byte_offset));
obj->set_byte_offset(*byte_offset_object);
......
......@@ -1324,8 +1324,7 @@ void Genesis::InitializeExperimentalGlobal() {
Handle<JSFunction> array_buffer_fun =
InstallFunction(
global, "ArrayBuffer", JS_ARRAY_BUFFER_TYPE,
JSArrayBuffer::kSize +
v8::ArrayBuffer::kInternalFieldCount * kPointerSize,
JSArrayBuffer::kSizeWithInternalFields,
isolate()->initial_object_prototype(),
Builtins::kIllegal, true, true);
native_context()->set_array_buffer_fun(*array_buffer_fun);
......
......@@ -180,6 +180,7 @@ Heap::Heap()
memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
native_contexts_list_ = NULL;
array_buffers_list_ = Smi::FromInt(0);
mark_compact_collector_.heap_ = this;
external_string_table_.heap_ = this;
// Put a dummy entry in the remembered pages so we can find the list the
......@@ -1539,11 +1540,6 @@ static Object* ProcessFunctionWeakReferences(Heap* heap,
void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
Object* undefined = undefined_value();
Object* head = undefined;
Context* tail = NULL;
Object* candidate = native_contexts_list_;
// We don't record weak slots during marking or scavenges.
// Instead we do it once when we complete mark-compact cycle.
// Note that write barrier has no effect if we are already in the middle of
......@@ -1551,6 +1547,16 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
bool record_slots =
gc_state() == MARK_COMPACT &&
mark_compact_collector()->is_compacting();
ProcessArrayBuffers(retainer, record_slots);
ProcessNativeContexts(retainer, record_slots);
}
void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer,
bool record_slots) {
Object* undefined = undefined_value();
Object* head = undefined;
Context* tail = NULL;
Object* candidate = native_contexts_list_;
while (candidate != undefined) {
// Check whether to keep the candidate in the list.
......@@ -1619,6 +1625,101 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
}
// Traits describing how to walk the intrusive weak list threaded through
// objects of type T: how to read/write the weak-next link, the byte offset
// of that link, and what extra work a surviving element needs.
// Specialized below for JSTypedArray and JSArrayBuffer.
template <class T>
struct WeakListVisitor;


// Walks the weak list headed by |list|, dropping elements the |retainer|
// declares dead (RetainAs returns NULL) and relinking the survivors.
// When |record_slots| is true (i.e. during a compacting mark-compact GC),
// every rewritten weak-next slot is reported to the |collector| so the
// pointer can be updated if its target moves.  Returns the new list head,
// or Smi::FromInt(0) when no element survived.
template <class T>
static Object* VisitWeakList(Object* list,
                             MarkCompactCollector* collector,
                             WeakObjectRetainer* retainer, bool record_slots) {
  Object* head = Smi::FromInt(0);
  T* tail = NULL;
  while (list != Smi::FromInt(0)) {
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == Smi::FromInt(0)) {
        // First surviving element becomes the new head.
        head = retained;
      } else {
        ASSERT(tail != NULL);
        WeakListVisitor<T>::set_weak_next(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::kWeakNextOffset);
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      tail = reinterpret_cast<T*>(retained);
      // Element-specific processing for live objects (e.g. a JSArrayBuffer
      // recursively prunes its nested typed-array view list).
      WeakListVisitor<T>::VisitLiveObject(
          tail, collector, retainer, record_slots);
    }
    // Advance through the *old* link; dead objects still hold a valid
    // weak-next field at this point in the GC cycle.
    list = WeakListVisitor<T>::get_weak_next(reinterpret_cast<T*>(list));
  }

  // Terminate the rebuilt list.  Go through the trait accessor, consistent
  // with the relinking above, instead of calling the member directly
  // (the trait is the only interface this template should require of T).
  // No slot recording is needed: the terminator is a Smi and never moves.
  if (tail != NULL) {
    WeakListVisitor<T>::set_weak_next(tail, Smi::FromInt(0));
  }
  return head;
}
// Weak-list traits for the per-ArrayBuffer list of JSTypedArray views,
// linked through JSTypedArray::weak_next.
template<>
struct WeakListVisitor<JSTypedArray> {
  static void set_weak_next(JSTypedArray* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* get_weak_next(JSTypedArray* obj) {
    return obj->weak_next();
  }

  // Typed arrays carry no nested weak structures, so a surviving element
  // needs no additional per-object processing.
  static void VisitLiveObject(JSTypedArray* obj,
                              MarkCompactCollector* collector,
                              WeakObjectRetainer* retainer,
                              bool record_slots) {}

  // Byte offset of the weak_next field; VisitWeakList uses it to record
  // rewritten link slots with the mark-compact collector.
  static const int kWeakNextOffset = JSTypedArray::kWeakNextOffset;
};
template<>
struct WeakListVisitor<JSArrayBuffer> {
static void set_weak_next(JSArrayBuffer* obj, Object* next) {
obj->set_weak_next(next);
}
static Object* get_weak_next(JSArrayBuffer* obj) {
return obj->weak_next();
}
static void VisitLiveObject(JSArrayBuffer* array_buffer,
MarkCompactCollector* collector,
WeakObjectRetainer* retainer,
bool record_slots) {
Object* typed_array_obj =
VisitWeakList<JSTypedArray>(array_buffer->weak_first_array(),
collector, retainer, record_slots);
array_buffer->set_weak_first_array(typed_array_obj);
if (typed_array_obj != Smi::FromInt(0) && record_slots) {
Object** slot = HeapObject::RawField(
array_buffer, JSArrayBuffer::kWeakFirstArrayOffset);
collector->RecordSlot(slot, slot, typed_array_obj);
}
}
static const int kWeakNextOffset = JSArrayBuffer::kWeakNextOffset;
};
// Drops dead JSArrayBuffers (and, transitively, their typed-array view
// lists) from the heap's global weak array-buffer list after a GC cycle.
void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
                               bool record_slots) {
  set_array_buffers_list(
      VisitWeakList<JSArrayBuffer>(array_buffers_list(),
                                   mark_compact_collector(),
                                   retainer, record_slots));
}
void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
DisallowHeapAllocation no_allocation;
......@@ -1794,6 +1895,14 @@ class ScavengingVisitor : public StaticVisitorBase {
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
table_.Register(kVisitJSArrayBuffer,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
table_.Register(kVisitJSTypedArray,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
table_.Register(kVisitJSRegExp,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
......
......@@ -1352,6 +1352,12 @@ class Heap {
}
Object* native_contexts_list() { return native_contexts_list_; }
void set_array_buffers_list(Object* object) {
array_buffers_list_ = object;
}
Object* array_buffers_list() { return array_buffers_list_; }
// Number of mark-sweeps.
unsigned int ms_count() { return ms_count_; }
......@@ -2022,6 +2028,8 @@ class Heap {
Object* native_contexts_list_;
Object* array_buffers_list_;
StoreBufferRebuilder store_buffer_rebuilder_;
struct StringTypeTable {
......@@ -2165,6 +2173,9 @@ class Heap {
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
// Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
......
......@@ -5330,10 +5330,15 @@ void JSArrayBuffer::set_is_external(bool value) {
}
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_array, Object, kWeakFirstArrayOffset)
ACCESSORS(JSTypedArray, buffer, Object, kBufferOffset)
ACCESSORS(JSTypedArray, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSTypedArray, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)
ACCESSORS(JSTypedArray, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSRegExp, data, Object, kDataOffset)
......
......@@ -540,8 +540,6 @@ static const char* TypeToString(InstanceType type) {
case JS_FUNCTION_TYPE: return "JS_FUNCTION";
case CODE_TYPE: return "CODE";
case JS_ARRAY_TYPE: return "JS_ARRAY";
case JS_ARRAY_BUFFER_TYPE: return "JS_ARRAY_BUFFER";
case JS_TYPED_ARRAY_TYPE: return "JS_TYPED_ARRAY";
case JS_PROXY_TYPE: return "JS_PROXY";
case JS_WEAK_MAP_TYPE: return "JS_WEAK_MAP";
case JS_REGEXP_TYPE: return "JS_REGEXP";
......@@ -549,6 +547,8 @@ static const char* TypeToString(InstanceType type) {
case JS_GLOBAL_OBJECT_TYPE: return "JS_GLOBAL_OBJECT";
case JS_BUILTINS_OBJECT_TYPE: return "JS_BUILTINS_OBJECT";
case JS_GLOBAL_PROXY_TYPE: return "JS_GLOBAL_PROXY";
case JS_TYPED_ARRAY_TYPE: return "JS_TYPED_ARRAY";
case JS_ARRAY_BUFFER_TYPE: return "JS_ARRAY_BUFFER";
case FOREIGN_TYPE: return "FOREIGN";
case JS_MESSAGE_OBJECT_TYPE: return "JS_MESSAGE_OBJECT_TYPE";
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return #NAME;
......
......@@ -79,6 +79,10 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitJSFunction, &VisitJSFunction);
table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
table_.Register(kVisitFreeSpace, &VisitFreeSpace);
table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);
......@@ -98,6 +102,43 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
}
// New-space evacuation visitor for JSArrayBuffer.  Visits every pointer
// field except the two weak-list links (weak_next, weak_first_array),
// which are maintained by the weak-list processing machinery instead.
// Returns the object size so the scavenger can advance past it.
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  // Skipping both weak fields as one contiguous gap requires them to be
  // adjacent in the object layout.
  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstArrayOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  Object** body_start = HeapObject::RawField(
      object, JSArrayBuffer::BodyDescriptor::kStartOffset);
  Object** weak_fields_start = HeapObject::RawField(
      object, JSArrayBuffer::kWeakNextOffset);
  Object** weak_fields_end = HeapObject::RawField(
      object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize);
  Object** body_end = HeapObject::RawField(
      object, JSArrayBuffer::kSizeWithInternalFields);
  VisitPointers(heap, body_start, weak_fields_start);
  VisitPointers(heap, weak_fields_end, body_end);
  return JSArrayBuffer::kSizeWithInternalFields;
}
// New-space evacuation visitor for JSTypedArray.  Visits every pointer
// field except the weak_next link, which the weak-list processing
// machinery maintains.  Returns the object size for the scavenger.
template<typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Object** body_start = HeapObject::RawField(
      object, JSTypedArray::BodyDescriptor::kStartOffset);
  Object** weak_next_slot = HeapObject::RawField(
      object, JSTypedArray::kWeakNextOffset);
  Object** after_weak_next = HeapObject::RawField(
      object, JSTypedArray::kWeakNextOffset + kPointerSize);
  Object** body_end = HeapObject::RawField(object, JSTypedArray::kSize);
  VisitPointers(heap, body_start, weak_next_slot);
  VisitPointers(heap, after_weak_next, body_end);
  return JSTypedArray::kSize;
}
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitShortcutCandidate,
......@@ -149,6 +190,10 @@ void StaticMarkingVisitor<StaticVisitor>::Initialize() {
table_.Register(kVisitJSFunction, &VisitJSFunction);
table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
// Registration for kVisitJSRegExp is done by StaticVisitor.
table_.Register(kVisitPropertyCell,
......@@ -400,6 +445,41 @@ void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
}
// Marking visitor for JSArrayBuffer.  Marks through every pointer field
// except the two weak-list links (weak_next, weak_first_array) — those
// must stay weak, and are handled by the weak-list processing instead.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  // Skipping both weak fields as one contiguous gap requires them to be
  // adjacent in the object layout.
  STATIC_ASSERT(
      JSArrayBuffer::kWeakFirstArrayOffset ==
      JSArrayBuffer::kWeakNextOffset + kPointerSize);
  Object** body_start = HeapObject::RawField(
      object, JSArrayBuffer::BodyDescriptor::kStartOffset);
  Object** weak_fields_start = HeapObject::RawField(
      object, JSArrayBuffer::kWeakNextOffset);
  Object** weak_fields_end = HeapObject::RawField(
      object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize);
  Object** body_end = HeapObject::RawField(
      object, JSArrayBuffer::kSizeWithInternalFields);
  StaticVisitor::VisitPointers(heap, body_start, weak_fields_start);
  StaticVisitor::VisitPointers(heap, weak_fields_end, body_end);
}
// Marking visitor for JSTypedArray.  Marks through every pointer field
// except the weak_next link, which must remain weak and is handled by
// the weak-list processing instead.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Object** body_start = HeapObject::RawField(
      object, JSTypedArray::BodyDescriptor::kStartOffset);
  Object** weak_next_slot = HeapObject::RawField(
      object, JSTypedArray::kWeakNextOffset);
  Object** after_weak_next = HeapObject::RawField(
      object, JSTypedArray::kWeakNextOffset + kPointerSize);
  Object** body_end = HeapObject::RawField(object, JSTypedArray::kSize);
  StaticVisitor::VisitPointers(heap, body_start, weak_next_slot);
  StaticVisitor::VisitPointers(heap, after_weak_next, body_end);
}
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
Heap* heap, Map* map) {
......
......@@ -134,6 +134,12 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case FILLER_TYPE:
return kVisitDataObjectGeneric;
case JS_ARRAY_BUFFER_TYPE:
return kVisitJSArrayBuffer;
case JS_TYPED_ARRAY_TYPE:
return kVisitJSTypedArray;
case JS_OBJECT_TYPE:
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_GENERATOR_OBJECT_TYPE:
......@@ -145,8 +151,6 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
case JS_GLOBAL_OBJECT_TYPE:
case JS_BUILTINS_OBJECT_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
case JS_ARRAY_BUFFER_TYPE:
case JS_TYPED_ARRAY_TYPE:
return GetVisitorIdForSize(kVisitJSObject,
kVisitJSObjectGeneric,
instance_size);
......
......@@ -92,6 +92,8 @@ class StaticVisitorBase : public AllStatic {
V(SharedFunctionInfo) \
V(JSFunction) \
V(JSWeakMap) \
V(JSArrayBuffer) \
V(JSTypedArray) \
V(JSRegExp)
// For data objects, JS objects and structs along with generic visitor which
......@@ -333,6 +335,9 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
return FreeSpace::cast(object)->Size();
}
INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
class DataObjectVisitor {
public:
template<int object_size>
......@@ -407,6 +412,8 @@ class StaticMarkingVisitor : public StaticVisitorBase {
INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
// Mark pointers in a Map and its TransitionArray together, possibly
......
......@@ -8786,6 +8786,12 @@ class JSArrayBuffer: public JSObject {
inline bool is_external();
inline void set_is_external(bool value);
// [weak_next]: linked list of array buffers.
DECL_ACCESSORS(weak_next, Object)
// [weak_first_array]: weak linked list of typed arrays.
DECL_ACCESSORS(weak_first_array, Object)
// Casting.
static inline JSArrayBuffer* cast(Object* obj);
......@@ -8796,7 +8802,12 @@ class JSArrayBuffer: public JSObject {
static const int kBackingStoreOffset = JSObject::kHeaderSize;
static const int kByteLengthOffset = kBackingStoreOffset + kPointerSize;
static const int kFlagOffset = kByteLengthOffset + kPointerSize;
static const int kSize = kFlagOffset + kPointerSize;
static const int kWeakNextOffset = kFlagOffset + kPointerSize;
static const int kWeakFirstArrayOffset = kWeakNextOffset + kPointerSize;
static const int kSize = kWeakFirstArrayOffset + kPointerSize;
static const int kSizeWithInternalFields =
kSize + v8::ArrayBuffer::kInternalFieldCount * kPointerSize;
private:
// Bit position in a flag
......@@ -8820,6 +8831,9 @@ class JSTypedArray: public JSObject {
// [length]: length of typed array in elements.
DECL_ACCESSORS(length, Object)
// [weak_next]: linked list of typed arrays over the same array buffer.
DECL_ACCESSORS(weak_next, Object)
// Casting.
static inline JSTypedArray* cast(Object* obj);
......@@ -8834,7 +8848,8 @@ class JSTypedArray: public JSObject {
static const int kByteOffsetOffset = kBufferOffset + kPointerSize;
static const int kByteLengthOffset = kByteOffsetOffset + kPointerSize;
static const int kLengthOffset = kByteLengthOffset + kPointerSize;
static const int kSize = kLengthOffset + kPointerSize;
static const int kWeakNextOffset = kLengthOffset + kPointerSize;
static const int kSize = kWeakNextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(JSTypedArray);
......
......@@ -687,6 +687,10 @@ void Runtime::SetupArrayBuffer(Isolate* isolate,
isolate->factory()->NewNumberFromSize(allocated_length);
CHECK(byte_length->IsSmi() || byte_length->IsHeapNumber());
array_buffer->set_byte_length(*byte_length);
array_buffer->set_weak_next(isolate->heap()->array_buffers_list());
isolate->heap()->set_array_buffers_list(*array_buffer);
array_buffer->set_weak_first_array(Smi::FromInt(0));
}
......@@ -855,6 +859,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(length);
holder->set_length(*length_obj);
holder->set_weak_next(buffer->weak_first_array());
buffer->set_weak_first_array(*holder);
Handle<ExternalArray> elements =
isolate->factory()->NewExternalArray(
......
......@@ -103,7 +103,8 @@
'test-unbound-queue.cc',
'test-utils.cc',
'test-version.cc',
'test-weakmaps.cc'
'test-weakmaps.cc',
'test-weaktypedarrays.cc'
],
'conditions': [
['v8_target_arch=="ia32"', {
......
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment