Commit e5a082f0 authored by yangguo, committed by Commit bot

Reland "Only use FreeSpace objects in the free list"

Review URL: https://codereview.chromium.org/882633002

Cr-Commit-Position: refs/heads/master@{#26296}
parent 22421bbe
@@ -227,9 +227,7 @@ void AllocationTracker::AllocationEvent(Address addr, int size) {
   // Mark the new block as FreeSpace to make sure the heap is iterable
   // while we are capturing stack trace.
-  FreeListNode::FromAddress(addr)->set_size(heap, size);
-  DCHECK_EQ(HeapObject::FromAddress(addr)->Size(), size);
-  DCHECK(FreeListNode::IsFreeListNode(HeapObject::FromAddress(addr)));
+  heap->CreateFillerObjectAt(addr, size);
   Isolate* isolate = heap->isolate();
   int length = 0;
...
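This first hunk is the template for every call-site change in the CL: rather than stamping the block with FreeListNode::set_size and then asserting the result parses, callers now go through Heap::CreateFillerObjectAt. A minimal sketch of the invariant that replaces the deleted DCHECKs (the helper name is mine, not from the CL):

```cpp
// Hypothetical helper capturing the invariant the tracker relies on:
// after CreateFillerObjectAt, the block parses as exactly one heap
// object of the requested size, so heap iteration stays safe.
void MarkBlockIterable(i::Heap* heap, i::Address addr, int size) {
  heap->CreateFillerObjectAt(addr, size);
  DCHECK_EQ(size, i::HeapObject::FromAddress(addr)->Size());
}
```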
@@ -27,6 +27,7 @@
 #include "src/debug.h"
 #include "src/deoptimizer.h"
 #include "src/execution.h"
+#include "src/full-codegen.h"
 #include "src/global-handles.h"
 #include "src/heap-profiler.h"
 #include "src/heap-snapshot-generator-inl.h"
@@ -220,6 +221,54 @@ bool RunExtraCode(Isolate* isolate, char* utf8_source) {
 }
+
+void CheckDefaultReservationSizes(const i::StartupSerializer& startup_ser,
+                                  const i::PartialSerializer& context_ser) {
+#ifdef DEBUG
+  i::List<i::SerializedData::Reservation> startup_reservations;
+  i::List<i::SerializedData::Reservation> context_reservations;
+  startup_ser.EncodeReservations(&startup_reservations);
+  context_ser.EncodeReservations(&context_reservations);
+  for (int space = 0; space < i::Serializer::kNumberOfSpaces; space++) {
+    // Exactly one chunk per space.
+    CHECK(startup_reservations[space].is_last());
+    CHECK(context_reservations[space].is_last());
+    uint32_t sum = startup_reservations[space].chunk_size() +
+                   context_reservations[space].chunk_size();
+    uint32_t limit = 0;
+    const int constant_pool_delta = i::FLAG_enable_ool_constant_pool ? 48 : 0;
+    switch (space) {
+      case i::NEW_SPACE:
+        limit = 3 * i::kPointerSize;
+        break;
+      case i::OLD_POINTER_SPACE:
+        limit = (128 + constant_pool_delta) * i::kPointerSize * i::KB;
+        break;
+      case i::OLD_DATA_SPACE:
+        limit = 192 * i::KB;
+        break;
+      case i::MAP_SPACE:
+        limit = 16 * i::kPointerSize * i::KB;
+        break;
+      case i::CELL_SPACE:
+        limit = 16 * i::kPointerSize * i::KB;
+        break;
+      case i::PROPERTY_CELL_SPACE:
+        limit = 8 * i::kPointerSize * i::KB;
+        break;
+      case i::CODE_SPACE:
+        limit = RoundUp((480 - constant_pool_delta) * i::KB *
+                            i::FullCodeGenerator::kBootCodeSizeMultiplier / 100,
+                        i::kPointerSize);
+        break;
+      default:
+        break;
+    }
+    CHECK_LE(sum, limit);
+  }
+#endif  // DEBUG
+}
+
 StartupData V8::CreateSnapshotDataBlob(char* custom_source) {
   Isolate::CreateParams params;
   params.enable_serializer = true;
@@ -266,6 +315,8 @@ StartupData V8::CreateSnapshotDataBlob(char* custom_source) {
     i::SnapshotData sd(snapshot_sink, ser);
     i::SnapshotData csd(context_sink, context_ser);
 
+    if (custom_source == NULL) CheckDefaultReservationSizes(ser, context_ser);
+
     result = i::Snapshot::CreateSnapshotBlob(sd.RawData(), csd.RawData(),
                                              metadata);
   }
...
@@ -124,7 +124,7 @@ class FullCodeGenerator: public AstVisitor {
   static const int kBootCodeSizeMultiplier = 120;
 #elif V8_TARGET_ARCH_MIPS64
   static const int kCodeSizeMultiplier = 149;
-  static const int kBootCodeSizeMultiplier = 120;
+  static const int kBootCodeSizeMultiplier = 170;
 #else
 #error Unsupported target architecture.
 #endif
...
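kBootCodeSizeMultiplier is what CheckDefaultReservationSizes above plugs into its CODE_SPACE budget, which is why the MIPS64 value is bumped in the same CL. A worked instance of that arithmetic (self-contained sketch; the multiplier value 120 and the formula come from the hunks above, everything else is an assumption):

```cpp
#include <cstdint>
// Assumes kPointerSize == 8 and no out-of-line constant pool
// (constant_pool_delta == 0).
constexpr uint32_t KB = 1024;
constexpr uint32_t kPointerSize = 8;
constexpr uint32_t kBootCodeSizeMultiplier = 120;
constexpr uint32_t RoundUp(uint32_t x, uint32_t multiple) {
  return (x + multiple - 1) / multiple * multiple;
}
constexpr uint32_t kCodeSpaceLimit =
    RoundUp(480 * KB * kBootCodeSizeMultiplier / 100, kPointerSize);
static_assert(kCodeSpaceLimit == 576 * KB, "480 KB * 1.20 == 576 KB");
// With the new MIPS64 multiplier of 170, the budget grows to 816 KB.
```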
@@ -495,11 +495,11 @@ void Heap::ClearAllICsByKind(Code::Kind kind) {
 }
 
-void Heap::RepairFreeListsAfterBoot() {
+void Heap::RepairFreeListsAfterDeserialization() {
   PagedSpaces spaces(this);
   for (PagedSpace* space = spaces.next(); space != NULL;
        space = spaces.next()) {
-    space->RepairFreeListsAfterBoot();
+    space->RepairFreeListsAfterDeserialization();
   }
 }
@@ -952,14 +952,15 @@ bool Heap::ReserveSpace(Reservation* reservations) {
       } else {
         allocation = paged_space(space)->AllocateRaw(size);
       }
-      FreeListNode* node;
-      if (allocation.To(&node)) {
+      HeapObject* free_space;
+      if (allocation.To(&free_space)) {
         // Mark with a free list node, in case we have a GC before
         // deserializing.
-        node->set_size(this, size);
+        Address free_space_address = free_space->address();
+        CreateFillerObjectAt(free_space_address, size);
         DCHECK(space < Serializer::kNumberOfPreallocatedSpaces);
-        chunk.start = node->address();
-        chunk.end = node->address() + size;
+        chunk.start = free_space_address;
+        chunk.end = free_space_address + size;
       } else {
         perform_gc = true;
         break;
@@ -3392,13 +3393,18 @@ AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
 
 void Heap::CreateFillerObjectAt(Address addr, int size) {
   if (size == 0) return;
   HeapObject* filler = HeapObject::FromAddress(addr);
+  // At this point, we may be deserializing the heap from a snapshot, and
+  // none of the maps have been created yet and are NULL.
   if (size == kPointerSize) {
-    filler->set_map_no_write_barrier(one_pointer_filler_map());
+    filler->set_map_no_write_barrier(raw_unchecked_one_pointer_filler_map());
+    DCHECK(filler->map() == NULL || filler->map() == one_pointer_filler_map());
   } else if (size == 2 * kPointerSize) {
-    filler->set_map_no_write_barrier(two_pointer_filler_map());
+    filler->set_map_no_write_barrier(raw_unchecked_two_pointer_filler_map());
+    DCHECK(filler->map() == NULL || filler->map() == two_pointer_filler_map());
   } else {
-    filler->set_map_no_write_barrier(free_space_map());
-    FreeSpace::cast(filler)->set_size(size);
+    filler->set_map_no_write_barrier(raw_unchecked_free_space_map());
+    DCHECK(filler->map() == NULL || filler->map() == free_space_map());
+    FreeSpace::cast(filler)->nobarrier_set_size(size);
   }
 }
...
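CreateFillerObjectAt now dispatches on block size using the raw_unchecked_* map accessors, which tolerate NULL roots during deserialization; the added DCHECKs accept either a NULL map or the correct filler map. A standalone mirror of the size dispatch (hypothetical helper; byte values in the comments assume kPointerSize == 8):

```cpp
// Hypothetical mirror of the dispatch in CreateFillerObjectAt above.
enum class FillerKind { kOnePointer, kTwoPointer, kFreeSpace };

FillerKind FillerKindFor(int size, int kPointerSize = 8) {
  if (size == kPointerSize) return FillerKind::kOnePointer;      //  8 bytes: map word only
  if (size == 2 * kPointerSize) return FillerKind::kTwoPointer;  // 16 bytes: map + one word
  // 3+ words: enough room for map, smi-tagged size, and the free-list
  // next pointer, so the block can be a real FreeSpace object.
  return FillerKind::kFreeSpace;
}
```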
@@ -692,8 +692,8 @@ class Heap {
   // Iterates the whole code space to clear all ICs of the given kind.
   void ClearAllICsByKind(Code::Kind kind);
 
-  // For use during bootup.
-  void RepairFreeListsAfterBoot();
+  // FreeSpace objects have a null map after deserialization. Update the map.
+  void RepairFreeListsAfterDeserialization();
 
   template <typename T>
   static inline bool IsOneByte(T t, int chars);
...
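The renamed method's new comment states the post-deserialization invariant: free-list nodes exist, but their map word is NULL until the free_space_map root has been created. A minimal sketch of what each category's repair then has to do (assumed shape; the real FreeListCategory::RepairFreeList body is not part of this diff):

```cpp
// Sketch under the assumption that each category walks its own list and
// rewrites the map word of every node that is still NULL.
void FreeListCategory::RepairFreeList(Heap* heap) {
  for (FreeSpace* node = top(); node != NULL; node = node->next()) {
    Map** map_location = reinterpret_cast<Map**>(node->address());
    if (*map_location == NULL) {
      // Deserialized node: the free_space_map root now exists, write it.
      *map_location = heap->free_space_map();
    } else {
      DCHECK(*map_location == heap->free_space_map());
    }
  }
}
```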
@@ -305,14 +305,6 @@ intptr_t LargeObjectSpace::Available() {
   return ObjectSizeFor(heap()->isolate()->memory_allocator()->Available());
 }
 
-bool FreeListNode::IsFreeListNode(HeapObject* object) {
-  Map* map = object->map();
-  Heap* heap = object->GetHeap();
-  return map == heap->raw_unchecked_free_space_map() ||
-         map == heap->raw_unchecked_one_pointer_filler_map() ||
-         map == heap->raw_unchecked_two_pointer_filler_map();
-}
-
 }
 }  // namespace v8::internal
...
This diff is collapsed.
@@ -1411,45 +1411,6 @@ class AllocationStats BASE_EMBEDDED {
 
 // -----------------------------------------------------------------------------
 // Free lists for old object spaces
-//
-// Free-list nodes are free blocks in the heap. They look like heap objects
-// (free-list node pointers have the heap object tag, and they have a map like
-// a heap object). They have a size and a next pointer. The next pointer is
-// the raw address of the next free list node (or NULL).
-class FreeListNode : public HeapObject {
- public:
-  // Obtain a free-list node from a raw address. This is not a cast because
-  // it does not check nor require that the first word at the address is a map
-  // pointer.
-  static FreeListNode* FromAddress(Address address) {
-    return reinterpret_cast<FreeListNode*>(HeapObject::FromAddress(address));
-  }
-
-  static inline bool IsFreeListNode(HeapObject* object);
-
-  // Set the size in bytes, which can be read with HeapObject::Size(). This
-  // function also writes a map to the first word of the block so that it
-  // looks like a heap object to the garbage collector and heap iteration
-  // functions.
-  void set_size(Heap* heap, int size_in_bytes);
-
-  // Accessors for the next field.
-  inline FreeListNode* next();
-  inline FreeListNode** next_address();
-  inline void set_next(FreeListNode* next);
-
-  inline void Zap();
-
-  static inline FreeListNode* cast(Object* object) {
-    return reinterpret_cast<FreeListNode*>(object);
-  }
-
- private:
-  static const int kNextOffset = POINTER_SIZE_ALIGN(FreeSpace::kHeaderSize);
-
-  DISALLOW_IMPLICIT_CONSTRUCTORS(FreeListNode);
-};
-
 
 // The free list category holds a pointer to the top element and a pointer to
 // the end element of the linked list of free memory blocks.
@@ -1461,27 +1422,26 @@ class FreeListCategory {
   void Reset();
 
-  void Free(FreeListNode* node, int size_in_bytes);
+  void Free(FreeSpace* node, int size_in_bytes);
 
-  FreeListNode* PickNodeFromList(int* node_size);
-  FreeListNode* PickNodeFromList(int size_in_bytes, int* node_size);
+  FreeSpace* PickNodeFromList(int* node_size);
+  FreeSpace* PickNodeFromList(int size_in_bytes, int* node_size);
 
   intptr_t EvictFreeListItemsInList(Page* p);
   bool ContainsPageFreeListItemsInList(Page* p);
 
   void RepairFreeList(Heap* heap);
 
-  FreeListNode* top() const {
-    return reinterpret_cast<FreeListNode*>(base::NoBarrier_Load(&top_));
+  FreeSpace* top() const {
+    return reinterpret_cast<FreeSpace*>(base::NoBarrier_Load(&top_));
   }
 
-  void set_top(FreeListNode* top) {
+  void set_top(FreeSpace* top) {
     base::NoBarrier_Store(&top_, reinterpret_cast<base::AtomicWord>(top));
   }
 
-  FreeListNode** GetEndAddress() { return &end_; }
-  FreeListNode* end() const { return end_; }
-  void set_end(FreeListNode* end) { end_ = end; }
+  FreeSpace* end() const { return end_; }
+  void set_end(FreeSpace* end) { end_ = end; }
 
   int* GetAvailableAddress() { return &available_; }
   int available() const { return available_; }
@@ -1497,9 +1457,9 @@ class FreeListCategory {
 #endif
 
  private:
-  // top_ points to the top FreeListNode* in the free list category.
+  // top_ points to the top FreeSpace* in the free list category.
   base::AtomicWord top_;
-  FreeListNode* end_;
+  FreeSpace* end_;
   base::Mutex mutex_;
 
   // Total available bytes in all blocks of this free list category.
@@ -1596,17 +1556,18 @@ class FreeList {
   FreeListCategory* large_list() { return &large_list_; }
   FreeListCategory* huge_list() { return &huge_list_; }
 
+  static const int kSmallListMin = 0x20 * kPointerSize;
+
  private:
   // The size range of blocks, in bytes.
   static const int kMinBlockSize = 3 * kPointerSize;
   static const int kMaxBlockSize = Page::kMaxRegularHeapObjectSize;
 
-  FreeListNode* FindNodeFor(int size_in_bytes, int* node_size);
+  FreeSpace* FindNodeFor(int size_in_bytes, int* node_size);
 
   PagedSpace* owner_;
   Heap* heap_;
 
-  static const int kSmallListMin = 0x20 * kPointerSize;
   static const int kSmallListMax = 0xff * kPointerSize;
   static const int kMediumListMax = 0x7ff * kPointerSize;
   static const int kLargeListMax = 0x3fff * kPointerSize;
@@ -1702,7 +1663,7 @@ class PagedSpace : public Space {
   // During boot the free_space_map is created, and afterwards we may need
   // to write it into the free list nodes that were already created.
-  void RepairFreeListsAfterBoot();
+  void RepairFreeListsAfterDeserialization();
 
   // Prepares for a mark-compact GC.
   void PrepareForMarkCompact();
@@ -1909,8 +1870,6 @@ class PagedSpace : public Space {
   // Maximum capacity of this space.
   intptr_t max_capacity_;
 
-  intptr_t SizeOfFirstPage();
-
   // Accounting information for this space.
   AllocationStats accounting_stats_;
...
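With FreeListNode gone, the FreeList size-class constants above now describe FreeSpace blocks directly, and kSmallListMin moves to the public section so callers can name the boundary. Evaluated for a 64-bit build (the boundaries are from the diff; the byte values are my arithmetic):

```cpp
// Free-list size classes implied by the FreeList constants above,
// for kPointerSize == 8.
constexpr int kPointerSize = 8;
constexpr int kSmallListMin = 0x20 * kPointerSize;    //     256 bytes
constexpr int kSmallListMax = 0xff * kPointerSize;    //   2,040 bytes
constexpr int kMediumListMax = 0x7ff * kPointerSize;  //  16,376 bytes
constexpr int kLargeListMax = 0x3fff * kPointerSize;  // 131,064 bytes
// Blocks above kLargeListMax land in huge_list().
```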
@@ -1362,7 +1362,6 @@ class Isolate {
   friend class ExecutionAccess;
   friend class HandleScopeImplementer;
-  friend class IsolateInitializer;
   friend class OptimizingCompilerThread;
   friend class SweeperThread;
   friend class ThreadManager;
...
@@ -3334,7 +3334,6 @@ CAST_ACCESSOR(FixedArrayBase)
 CAST_ACCESSOR(FixedDoubleArray)
 CAST_ACCESSOR(FixedTypedArrayBase)
 CAST_ACCESSOR(Foreign)
-CAST_ACCESSOR(FreeSpace)
 CAST_ACCESSOR(GlobalObject)
 CAST_ACCESSOR(HeapObject)
 CAST_ACCESSOR(JSArray)
@@ -3443,6 +3442,39 @@ SMI_ACCESSORS(String, length, kLengthOffset)
 SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
 
+FreeSpace* FreeSpace::next() {
+  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
+         (!GetHeap()->deserialization_complete() && map() == NULL));
+  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
+  return reinterpret_cast<FreeSpace*>(
+      Memory::Address_at(address() + kNextOffset));
+}
+
+
+FreeSpace** FreeSpace::next_address() {
+  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
+         (!GetHeap()->deserialization_complete() && map() == NULL));
+  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
+  return reinterpret_cast<FreeSpace**>(address() + kNextOffset);
+}
+
+
+void FreeSpace::set_next(FreeSpace* next) {
+  DCHECK(map() == GetHeap()->raw_unchecked_free_space_map() ||
+         (!GetHeap()->deserialization_complete() && map() == NULL));
+  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
+  base::NoBarrier_Store(
+      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
+      reinterpret_cast<base::AtomicWord>(next));
+}
+
+
+FreeSpace* FreeSpace::cast(HeapObject* o) {
+  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
+  return reinterpret_cast<FreeSpace*>(o);
+}
+
+
 uint32_t Name::hash_field() {
   return READ_UINT32_FIELD(this, kHashFieldOffset);
 }
...
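The new inline accessors read and write the next pointer at kNextOffset, with DCHECKs that accept a NULL map only while deserialization is incomplete. A sketch of pushing a freed block onto a category list with them (hypothetical call site; the CL's real FreeListCategory::Free lives in the collapsed diff above and also updates the byte accounting, which this sketch omits):

```cpp
// Hypothetical push of a freed block onto a category using the new
// FreeSpace accessors.
void PushOnCategory(FreeListCategory* category, FreeSpace* node) {
  node->set_next(category->top());  // link in front of the current head
  category->set_top(node);
  if (category->end() == NULL) category->set_end(node);
}
```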
@@ -4448,8 +4448,11 @@ class ByteArray: public FixedArrayBase {
 };
 
-// FreeSpace represents fixed sized areas of the heap that are not currently in
-// use. Used by the heap and GC.
+// FreeSpace are fixed-size free memory blocks used by the heap and GC.
+// They look like heap objects (are heap object tagged and have a map) so that
+// the heap remains iterable. They have a size and a next pointer.
+// The next pointer is the raw address of the next FreeSpace object (or NULL)
+// in the free list.
 class FreeSpace: public HeapObject {
  public:
   // [size]: size of the free space including the header.
@@ -4461,7 +4464,12 @@ class FreeSpace: public HeapObject {
   inline int Size() { return size(); }
 
-  DECLARE_CAST(FreeSpace)
+  // Accessors for the next field.
+  inline FreeSpace* next();
+  inline FreeSpace** next_address();
+  inline void set_next(FreeSpace* next);
+
+  inline static FreeSpace* cast(HeapObject* obj);
 
   // Dispatched behavior.
   DECLARE_PRINTER(FreeSpace)
@@ -4470,9 +4478,7 @@ class FreeSpace: public HeapObject {
   // Layout description.
   // Size is smi tagged when it is stored.
   static const int kSizeOffset = HeapObject::kHeaderSize;
-  static const int kHeaderSize = kSizeOffset + kPointerSize;
-
-  static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
+  static const int kNextOffset = POINTER_SIZE_ALIGN(kSizeOffset + kPointerSize);
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(FreeSpace);
...
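The layout change replaces kHeaderSize/kAlignedSize with an explicit kNextOffset, pinning down where the next pointer lives. These sanity checks are my own, not part of the CL; they assume the v8::internal definitions above are in scope:

```cpp
// Layout implied by the constants above:
// word 0: map, word 1: smi-tagged size, word 2: next pointer.
static_assert(FreeSpace::kSizeOffset == kPointerSize, "size follows map");
static_assert(FreeSpace::kNextOffset == 2 * kPointerSize, "next follows size");
// The next field always fits in FreeList::kMinBlockSize = 3 * kPointerSize.
static_assert(FreeSpace::kNextOffset + kPointerSize <= 3 * kPointerSize,
              "minimum free block holds map, size and next");
```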
@@ -664,7 +664,7 @@ void Deserializer::Deserialize(Isolate* isolate) {
   DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
   isolate_->heap()->IterateSmiRoots(this);
   isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
-  isolate_->heap()->RepairFreeListsAfterBoot();
+  isolate_->heap()->RepairFreeListsAfterDeserialization();
   isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
   isolate_->heap()->set_native_contexts_list(
...
@@ -486,27 +486,39 @@ static inline bool FillUpOnePage(v8::internal::NewSpace* space) {
   v8::internal::AllocationResult allocation =
       space->AllocateRaw(v8::internal::Page::kMaxRegularHeapObjectSize);
   if (allocation.IsRetry()) return false;
-  v8::internal::FreeListNode* node =
-      v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
-  node->set_size(space->heap(), v8::internal::Page::kMaxRegularHeapObjectSize);
+  v8::internal::HeapObject* free_space = NULL;
+  CHECK(allocation.To(&free_space));
+  space->heap()->CreateFillerObjectAt(
+      free_space->address(), v8::internal::Page::kMaxRegularHeapObjectSize);
   return true;
 }
 
-static inline void SimulateFullSpace(v8::internal::NewSpace* space) {
-  int new_linear_size = static_cast<int>(*space->allocation_limit_address() -
-                                         *space->allocation_top_address());
-  if (new_linear_size > 0) {
-    // Fill up the current page.
-    v8::internal::AllocationResult allocation =
-        space->AllocateRaw(new_linear_size);
-    v8::internal::FreeListNode* node =
-        v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
-    node->set_size(space->heap(), new_linear_size);
+// Helper function that simulates a fill new-space in the heap.
+static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
+                                        int extra_bytes) {
+  int space_remaining = static_cast<int>(*space->allocation_limit_address() -
+                                         *space->allocation_top_address());
+  CHECK(space_remaining >= extra_bytes);
+  int new_linear_size = space_remaining - extra_bytes;
+  if (new_linear_size == 0) return;
+  v8::internal::AllocationResult allocation =
+      space->AllocateRaw(new_linear_size);
+  v8::internal::HeapObject* free_space = NULL;
+  CHECK(allocation.To(&free_space));
+  space->heap()->CreateFillerObjectAt(free_space->address(), new_linear_size);
+}
+
+
+static inline void FillCurrentPage(v8::internal::NewSpace* space) {
+  AllocateAllButNBytes(space, 0);
+}
+
+
+static inline void SimulateFullSpace(v8::internal::NewSpace* space) {
+  FillCurrentPage(space);
+  while (FillUpOnePage(space)) {
   }
-  // Fill up all remaining pages.
-  while (FillUpOnePage(space))
-    ;
 }
...
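This hunk recreates AllocateAllButNBytes (removed from its old location in the next hunk) in a generalized form so FillCurrentPage and SimulateFullSpace can share it. A hypothetical cctest snippet exercising the trio (the CcTest fixture is assumed):

```cpp
// Hypothetical test body using the helpers above.
v8::internal::NewSpace* new_space = CcTest::heap()->new_space();
AllocateAllButNBytes(new_space, 128);  // leave exactly 128 bytes in the LAB
FillCurrentPage(new_space);            // same as AllocateAllButNBytes(space, 0)
SimulateFullSpace(new_space);          // then exhaust every remaining page
```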
@@ -3872,21 +3872,6 @@ TEST(Regress169209) {
 }
 
-// Helper function that simulates a fill new-space in the heap.
-static inline void AllocateAllButNBytes(v8::internal::NewSpace* space,
-                                        int extra_bytes) {
-  int space_remaining = static_cast<int>(
-      *space->allocation_limit_address() - *space->allocation_top_address());
-  CHECK(space_remaining >= extra_bytes);
-  int new_linear_size = space_remaining - extra_bytes;
-  v8::internal::AllocationResult allocation =
-      space->AllocateRaw(new_linear_size);
-  v8::internal::FreeListNode* node =
-      v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
-  node->set_size(space->heap(), new_linear_size);
-}
-
 TEST(Regress169928) {
   i::FLAG_allow_natives_syntax = true;
   i::FLAG_crankshaft = false;
...
@@ -156,6 +156,23 @@ static void Serialize(v8::Isolate* isolate) {
 }
 
+Vector<const uint8_t> ConstructSource(Vector<const uint8_t> head,
+                                      Vector<const uint8_t> body,
+                                      Vector<const uint8_t> tail, int repeats) {
+  int source_length = head.length() + body.length() * repeats + tail.length();
+  uint8_t* source = NewArray<uint8_t>(static_cast<size_t>(source_length));
+  CopyChars(source, head.start(), head.length());
+  for (int i = 0; i < repeats; i++) {
+    CopyChars(source + head.length() + i * body.length(), body.start(),
+              body.length());
+  }
+  CopyChars(source + head.length() + repeats * body.length(), tail.start(),
+            tail.length());
+  return Vector<const uint8_t>(const_cast<const uint8_t*>(source),
+                               source_length);
+}
+
+
 // Test that the whole heap can be serialized.
 UNINITIALIZED_TEST(Serialize) {
   if (!Snapshot::HaveASnapshotToStartFrom()) {
@@ -546,7 +563,6 @@ UNINITIALIZED_TEST(CustomContextSerialization) {
   params.enable_serializer = true;
   v8::Isolate* v8_isolate = v8::Isolate::New(params);
   Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
-  Heap* heap = isolate->heap();
   {
     v8::Isolate::Scope isolate_scope(v8_isolate);
@@ -569,6 +585,16 @@ UNINITIALIZED_TEST(CustomContextSerialization) {
         "var r = Math.random() + Math.cos(0);"
         "var f = (function(a, b) { return a + b; }).bind(1, 2, 3);"
         "var s = parseInt('12345');");
+    Vector<const uint8_t> source = ConstructSource(
+        STATIC_CHAR_VECTOR("function g() { return [,"),
+        STATIC_CHAR_VECTOR("1,"),
+        STATIC_CHAR_VECTOR("];} a = g(); b = g(); b.push(1);"), 100000);
+    v8::Handle<v8::String> source_str = v8::String::NewFromOneByte(
+        v8_isolate, source.start(), v8::String::kNormalString,
+        source.length());
+    CompileRun(source_str);
+    source.Dispose();
   }
   // Make sure all builtin scripts are cached.
   {
@@ -579,7 +605,7 @@ UNINITIALIZED_TEST(CustomContextSerialization) {
   }
   // If we don't do this then we end up with a stray root pointing at the
   // context even after we have disposed of env.
-  heap->CollectAllGarbage(Heap::kNoGCFlags);
+  isolate->heap()->CollectAllAvailableGarbage("snapshotting");
 
   int file_name_length = StrLength(FLAG_testing_serialization_file) + 10;
   Vector<char> startup_name = Vector<char>::New(file_name_length + 1);
@@ -667,6 +693,10 @@ UNINITIALIZED_DEPENDENT_TEST(CustomContextDeserialization,
       CHECK_EQ(5, f);
       v8::Handle<v8::String> s = CompileRun("s")->ToString(v8_isolate);
       CHECK(s->Equals(v8_str("12345")));
+      int a = CompileRun("a.length")->ToNumber(v8_isolate)->Int32Value();
+      CHECK_EQ(100001, a);
+      int b = CompileRun("b.length")->ToNumber(v8_isolate)->Int32Value();
+      CHECK_EQ(100002, b);
     }
   }
   v8_isolate->Dispose();
@@ -819,23 +849,6 @@ TEST(SerializeToplevelInternalizedString) {
 }
 
-Vector<const uint8_t> ConstructSource(Vector<const uint8_t> head,
-                                      Vector<const uint8_t> body,
-                                      Vector<const uint8_t> tail, int repeats) {
-  int source_length = head.length() + body.length() * repeats + tail.length();
-  uint8_t* source = NewArray<uint8_t>(static_cast<size_t>(source_length));
-  CopyChars(source, head.start(), head.length());
-  for (int i = 0; i < repeats; i++) {
-    CopyChars(source + head.length() + i * body.length(), body.start(),
-              body.length());
-  }
-  CopyChars(source + head.length() + repeats * body.length(), tail.start(),
-            tail.length());
-  return Vector<const uint8_t>(const_cast<const uint8_t*>(source),
-                               source_length);
-}
-
 TEST(SerializeToplevelLargeCodeObject) {
   FLAG_serialize_toplevel = true;
   LocalContext context;
...
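ConstructSource moves toward the top of this test file (added in the first hunk, removed near the bottom) so the custom-context test can build a large script. A worked call showing the length arithmetic behind the CHECK_EQ(100001, ...) and CHECK_EQ(100002, ...) expectations above (hypothetical inputs):

```cpp
// head + repeats * body + tail; the caller owns the buffer and must
// call source.Dispose().
Vector<const uint8_t> source = ConstructSource(
    STATIC_CHAR_VECTOR("var x = ["), STATIC_CHAR_VECTOR("1,"),
    STATIC_CHAR_VECTOR("];"), 3);
// source is "var x = [1,1,1,];" (9 + 3 * 2 + 2 = 17 bytes).
// In the test above, g() returns [,1,1,...] with 100000 "1," repeats:
// one leading hole plus 100000 elements gives a.length == 100001, and
// the extra b.push(1) makes b.length == 100002.
```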
@@ -459,18 +459,6 @@ TEST(SizeOfFirstPageIsLargeEnough) {
 }
 
-static inline void FillCurrentPage(v8::internal::NewSpace* space) {
-  int new_linear_size = static_cast<int>(*space->allocation_limit_address() -
-                                         *space->allocation_top_address());
-  if (new_linear_size == 0) return;
-  v8::internal::AllocationResult allocation =
-      space->AllocateRaw(new_linear_size);
-  v8::internal::FreeListNode* node =
-      v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
-  node->set_size(space->heap(), new_linear_size);
-}
-
 UNINITIALIZED_TEST(NewSpaceGrowsToTargetCapacity) {
   FLAG_target_semi_space_size = 2;
   if (FLAG_optimize_for_size) return;
@@ -502,9 +490,9 @@ UNINITIALIZED_TEST(NewSpaceGrowsToTargetCapacity) {
 
       // Turn the allocation into a proper object so isolate teardown won't
       // crash.
-      v8::internal::FreeListNode* node =
-          v8::internal::FreeListNode::cast(allocation.ToObjectChecked());
-      node->set_size(new_space->heap(), 80);
+      HeapObject* free_space = NULL;
+      CHECK(allocation.To(&free_space));
+      new_space->heap()->CreateFillerObjectAt(free_space->address(), 80);
     }
   }
   isolate->Dispose();
...