Commit fdb0784d authored by mlippautz's avatar mlippautz Committed by Commit bot

[heap] Remove LocalStoreBuffer and add slots in parallel

Now that we have page-local remembered sets (due to refilling in page
granularity) we can perform all updates on the sets during compaction in
parallel without caching slots locally.

BUG=chromium:524425
LOG=N
NOTRY=true

Review URL: https://codereview.chromium.org/1811573002

Cr-Commit-Position: refs/heads/master@{#34952}
parent 7ec8ecce
This diff is collapsed.
......@@ -25,7 +25,6 @@ class CodeFlusher;
class MarkCompactCollector;
class MarkingVisitor;
class RootMarkingVisitor;
class LocalSlotsBuffer;
class Marking : public AllStatic {
public:
......@@ -487,9 +486,7 @@ class MarkCompactCollector {
void UpdateSlotsRecordedIn(SlotsBuffer* buffer);
void MigrateObject(HeapObject* dst, HeapObject* src, int size,
AllocationSpace to_old_space,
LocalSlotsBuffer* old_to_old_slots,
LocalSlotsBuffer* old_to_new_slots);
AllocationSpace to_old_space);
void InvalidateCode(Code* code);
......@@ -811,11 +808,6 @@ class MarkCompactCollector {
// swept in parallel.
void ParallelSweepSpacesComplete();
// Updates store buffer and slot buffer for a pointer in a migrating object.
void RecordMigratedSlot(Object* value, Address slot,
LocalSlotsBuffer* old_to_old_slots,
LocalSlotsBuffer* old_to_new_slots);
#ifdef DEBUG
friend class MarkObjectVisitor;
static void VisitObject(HeapObject* obj);
......
......@@ -239,86 +239,6 @@ class RememberedSet {
static bool IsValidSlot(Heap* heap, MemoryChunk* chunk, Object** slot);
};
// Buffer for keeping thead local migration slots during compaction.
// TODO(ulan): Remove this once every thread gets local pages in compaction
// space.
class LocalSlotsBuffer BASE_EMBEDDED {
public:
LocalSlotsBuffer() : top_(new Node(nullptr)) {}
~LocalSlotsBuffer() {
Node* current = top_;
while (current != nullptr) {
Node* tmp = current->next;
delete current;
current = tmp;
}
}
void Record(Address addr) {
EnsureSpaceFor(1);
uintptr_t entry = reinterpret_cast<uintptr_t>(addr);
DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES));
Insert(entry);
}
void Record(SlotType type, Address addr) {
EnsureSpaceFor(2);
Insert(static_cast<uintptr_t>(type));
uintptr_t entry = reinterpret_cast<uintptr_t>(addr);
DCHECK_GE(entry, static_cast<uintptr_t>(NUMBER_OF_SLOT_TYPES));
Insert(entry);
}
template <typename UntypedCallback, typename TypedCallback>
void Iterate(UntypedCallback untyped_callback, TypedCallback typed_callback) {
Node* current = top_;
bool typed = false;
SlotType type;
Address addr;
while (current != nullptr) {
for (int i = 0; i < current->count; i++) {
uintptr_t entry = current->buffer[i];
if (entry < NUMBER_OF_SLOT_TYPES) {
DCHECK(!typed);
typed = true;
type = static_cast<SlotType>(entry);
} else {
addr = reinterpret_cast<Address>(entry);
if (typed) {
typed_callback(type, addr);
typed = false;
} else {
untyped_callback(addr);
}
}
}
current = current->next;
}
}
private:
void EnsureSpaceFor(int count) {
if (top_->remaining_free_slots() < count) top_ = new Node(top_);
}
void Insert(uintptr_t entry) { top_->buffer[top_->count++] = entry; }
static const int kBufferSize = 16 * KB;
struct Node : Malloced {
explicit Node(Node* next_node) : next(next_node), count(0) {}
inline int remaining_free_slots() { return kBufferSize - count; }
Node* next;
uintptr_t buffer[kBufferSize];
int count;
};
Node* top_;
};
} // namespace internal
} // namespace v8
......
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <limits>
#include <set>
#include "src/globals.h"
#include "src/heap/remembered-set.h"
#include "src/heap/spaces.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
// Records a mix of untyped and typed slots, then checks via Iterate() that
// every recorded slot is reported exactly once and nothing extra appears.
TEST(LocalSlotsBuffer, InsertAndIterate) {
  LocalSlotsBuffer buffer;
  std::set<Address> untyped;
  std::set<std::pair<SlotType, Address> > typed;
  // Start at 1000 so every fabricated address is >= NUMBER_OF_SLOT_TYPES,
  // as required by LocalSlotsBuffer's entry encoding.
  for (int k = 1000; k < 10000; k += NUMBER_OF_SLOT_TYPES) {
    untyped.insert(reinterpret_cast<Address>(k));
    buffer.Record(reinterpret_cast<Address>(k));
    for (int i = 0; i < NUMBER_OF_SLOT_TYPES; i++) {
      typed.insert(std::make_pair(static_cast<SlotType>(i),
                                  reinterpret_cast<Address>(k + i)));
      buffer.Record(static_cast<SlotType>(i), reinterpret_cast<Address>(k + i));
    }
  }
  buffer.Iterate(
      [&untyped](Address addr) {
        // Unsigned literal avoids -Wsign-compare inside the macro expansion;
        // erasing as we go also catches duplicate reports of the same slot.
        EXPECT_EQ(1u, untyped.count(addr));
        untyped.erase(addr);
      },
      [&typed](SlotType type, Address addr) {
        EXPECT_EQ(1u, typed.count(std::make_pair(type, addr)));
        typed.erase(std::make_pair(type, addr));
      });
  // Every recorded slot must have been visited exactly once.
  EXPECT_TRUE(untyped.empty());
  EXPECT_TRUE(typed.empty());
}
} // namespace internal
} // namespace v8
......@@ -112,7 +112,6 @@
'heap/heap-unittest.cc',
'heap/scavenge-job-unittest.cc',
'heap/slot-set-unittest.cc',
'heap/remembered-set-unittest.cc',
'locked-queue-unittest.cc',
'run-all-unittests.cc',
'test-utils.h',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment