Commit 2a49c903 authored by Piotr Bialecki, committed by Commit Bot

Revert "[heap] Add concurrent typed slot recording"

This reverts commit 9eb090d2.

Reason for revert: breaks trybot android-pie-arm64-dbg, repro steps: build cctest with target_cpu="arm64" in the args.

See thread: 
https://chromium.slack.com/archives/CGJ5WKRUH/p1598563610118900

Original change's description:
> [heap] Add concurrent typed slot recording
> 
> Since the typed slot set is not thread-safe, each concurrent marking
> barrier collects typed slots locally and publishes them to the main
> typed slot set in safepoints.
> Bug: v8:10315
> 
> Change-Id: If1f5c5df786df88aac7bc27088afe91a4173c826
> Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2370302
> Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
> Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#69576}

TBR=ulan@chromium.org,dinfuehr@chromium.org

# Not skipping CQ checks because original CL landed > 1 day ago.

Bug: v8:10315
Change-Id: Iade0443e5eccef06e3ea77913e18fd1f563995f5
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2380613
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69597}
parent 9aa222da
...@@ -45,21 +45,17 @@ void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot, ...@@ -45,21 +45,17 @@ void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
} }
void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) { void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
DCHECK(is_main_thread_barrier_);
if (MarkValue(host, value)) { if (MarkValue(host, value)) {
if (is_compacting_) { if (is_compacting_) {
if (is_main_thread_barrier_) {
// An optimization to avoid allocating additional typed slots for the
// main thread.
collector_->RecordRelocSlot(host, reloc_info, value); collector_->RecordRelocSlot(host, reloc_info, value);
} else {
RecordRelocSlot(host, reloc_info, value);
}
} }
} }
} }
void MarkingBarrier::Write(JSArrayBuffer host, void MarkingBarrier::Write(JSArrayBuffer host,
ArrayBufferExtension* extension) { ArrayBufferExtension* extension) {
DCHECK(is_main_thread_barrier_);
if (!V8_CONCURRENT_MARKING_BOOL && marking_state_.IsBlack(host)) { if (!V8_CONCURRENT_MARKING_BOOL && marking_state_.IsBlack(host)) {
// The extension will be marked when the marker visits the host object. // The extension will be marked when the marker visits the host object.
return; return;
...@@ -78,19 +74,6 @@ void MarkingBarrier::Write(Map host, DescriptorArray descriptor_array, ...@@ -78,19 +74,6 @@ void MarkingBarrier::Write(Map host, DescriptorArray descriptor_array,
} }
} }
// Buffers a typed (reloc-info) slot locally on this marking barrier.
// The main typed slot set is not thread-safe, so slots are collected
// per MemoryChunk in typed_slots_map_ and later merged into the
// OLD_TO_OLD remembered set when the barrier publishes (see Publish()).
void MarkingBarrier::RecordRelocSlot(Code host, RelocInfo* rinfo,
HeapObject target) {
MarkCompactCollector::RecordRelocSlotInfo info =
MarkCompactCollector::PrepareRecordRelocSlot(host, rinfo, target);
if (info.should_record) {
// operator[] default-constructs a null unique_ptr for a chunk seen for
// the first time; allocate its TypedSlots container lazily.
auto& typed_slots = typed_slots_map_[info.memory_chunk];
if (!typed_slots) {
typed_slots.reset(new TypedSlots());
}
typed_slots->Insert(info.slot_type, info.offset);
}
}
// static // static
void MarkingBarrier::ActivateAll(Heap* heap, bool is_compacting) { void MarkingBarrier::ActivateAll(Heap* heap, bool is_compacting) {
heap->marking_barrier()->Activate(is_compacting); heap->marking_barrier()->Activate(is_compacting);
...@@ -126,13 +109,6 @@ void MarkingBarrier::Publish() { ...@@ -126,13 +109,6 @@ void MarkingBarrier::Publish() {
DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps); DCHECK_IMPLIES(!is_main_thread_barrier_, FLAG_local_heaps);
if (is_activated_) { if (is_activated_) {
worklist_.Publish(); worklist_.Publish();
for (auto& it : typed_slots_map_) {
MemoryChunk* memory_chunk = it.first;
std::unique_ptr<TypedSlots>& typed_slots = it.second;
RememberedSet<OLD_TO_OLD>::MergeTyped(memory_chunk,
std::move(typed_slots));
}
typed_slots_map_.clear();
} }
} }
...@@ -170,7 +146,6 @@ void MarkingBarrier::Deactivate() { ...@@ -170,7 +146,6 @@ void MarkingBarrier::Deactivate() {
p->SetOldGenerationPageFlags(false); p->SetOldGenerationPageFlags(false);
} }
} }
DCHECK(typed_slots_map_.empty());
DCHECK(worklist_.IsLocalEmpty()); DCHECK(worklist_.IsLocalEmpty());
} }
......
...@@ -45,8 +45,6 @@ class MarkingBarrier { ...@@ -45,8 +45,6 @@ class MarkingBarrier {
inline bool WhiteToGreyAndPush(HeapObject value); inline bool WhiteToGreyAndPush(HeapObject value);
void RecordRelocSlot(Code host, RelocInfo* rinfo, HeapObject target);
void ActivateSpace(PagedSpace*); void ActivateSpace(PagedSpace*);
void ActivateSpace(NewSpace*); void ActivateSpace(NewSpace*);
...@@ -58,9 +56,6 @@ class MarkingBarrier { ...@@ -58,9 +56,6 @@ class MarkingBarrier {
IncrementalMarking* incremental_marking_; IncrementalMarking* incremental_marking_;
MarkingWorklist::Local worklist_; MarkingWorklist::Local worklist_;
MarkingState marking_state_; MarkingState marking_state_;
std::unordered_map<MemoryChunk*, std::unique_ptr<TypedSlots>,
MemoryChunk::Hasher>
typed_slots_map_;
bool is_compacting_ = false; bool is_compacting_ = false;
bool is_activated_ = false; bool is_activated_ = false;
bool is_main_thread_barrier_; bool is_main_thread_barrier_;
......
...@@ -129,9 +129,8 @@ i::ReadOnlyHeap* CcTest::read_only_heap() { ...@@ -129,9 +129,8 @@ i::ReadOnlyHeap* CcTest::read_only_heap() {
return i_isolate()->read_only_heap(); return i_isolate()->read_only_heap();
} }
void CcTest::CollectGarbage(i::AllocationSpace space, i::Isolate* isolate) { void CcTest::CollectGarbage(i::AllocationSpace space) {
i::Isolate* iso = isolate ? isolate : i_isolate(); heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
iso->heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
} }
void CcTest::CollectAllGarbage(i::Isolate* isolate) { void CcTest::CollectAllGarbage(i::Isolate* isolate) {
......
...@@ -137,8 +137,7 @@ class CcTest { ...@@ -137,8 +137,7 @@ class CcTest {
static i::Heap* heap(); static i::Heap* heap();
static i::ReadOnlyHeap* read_only_heap(); static i::ReadOnlyHeap* read_only_heap();
static void CollectGarbage(i::AllocationSpace space, static void CollectGarbage(i::AllocationSpace space);
i::Isolate* isolate = nullptr);
static void CollectAllGarbage(i::Isolate* isolate = nullptr); static void CollectAllGarbage(i::Isolate* isolate = nullptr);
static void CollectAllAvailableGarbage(i::Isolate* isolate = nullptr); static void CollectAllAvailableGarbage(i::Isolate* isolate = nullptr);
static void PreciseCollectAllGarbage(i::Isolate* isolate = nullptr); static void PreciseCollectAllGarbage(i::Isolate* isolate = nullptr);
......
...@@ -18,11 +18,9 @@ namespace v8 { ...@@ -18,11 +18,9 @@ namespace v8 {
namespace internal { namespace internal {
namespace heap { namespace heap {
void InvokeScavenge(Isolate* isolate) { void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }
CcTest::CollectGarbage(i::NEW_SPACE, isolate);
}
void InvokeMarkSweep(Isolate* isolate) { CcTest::CollectAllGarbage(isolate); } void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }
void SealCurrentObjects(Heap* heap) { void SealCurrentObjects(Heap* heap) {
CcTest::CollectAllGarbage(); CcTest::CollectAllGarbage();
......
...@@ -66,9 +66,9 @@ void GcAndSweep(Heap* heap, AllocationSpace space); ...@@ -66,9 +66,9 @@ void GcAndSweep(Heap* heap, AllocationSpace space);
void ForceEvacuationCandidate(Page* page); void ForceEvacuationCandidate(Page* page);
void InvokeScavenge(Isolate* isolate = nullptr); void InvokeScavenge();
void InvokeMarkSweep(Isolate* isolate = nullptr); void InvokeMarkSweep();
template <typename GlobalOrPersistent> template <typename GlobalOrPersistent>
bool InYoungGeneration(v8::Isolate* isolate, const GlobalOrPersistent& global) { bool InYoungGeneration(v8::Isolate* isolate, const GlobalOrPersistent& global) {
......
...@@ -8,10 +8,6 @@ ...@@ -8,10 +8,6 @@
#include "src/base/platform/condition-variable.h" #include "src/base/platform/condition-variable.h"
#include "src/base/platform/mutex.h" #include "src/base/platform/mutex.h"
#include "src/base/platform/semaphore.h" #include "src/base/platform/semaphore.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/assembler.h"
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/macro-assembler.h"
#include "src/common/globals.h" #include "src/common/globals.h"
#include "src/handles/handles-inl.h" #include "src/handles/handles-inl.h"
#include "src/handles/local-handles-inl.h" #include "src/handles/local-handles-inl.h"
...@@ -290,8 +286,7 @@ UNINITIALIZED_TEST(ConcurrentWriteBarrier) { ...@@ -290,8 +286,7 @@ UNINITIALIZED_TEST(ConcurrentWriteBarrier) {
HandleScope handle_scope(i_isolate); HandleScope handle_scope(i_isolate);
Handle<FixedArray> fixed_array_handle( Handle<FixedArray> fixed_array_handle(
i_isolate->factory()->NewFixedArray(1)); i_isolate->factory()->NewFixedArray(1));
Handle<HeapNumber> value_handle( Handle<HeapNumber> value_handle(i_isolate->factory()->NewHeapNumber(1.1));
i_isolate->factory()->NewHeapNumber<AllocationType::kOld>(1.1));
fixed_array = *fixed_array_handle; fixed_array = *fixed_array_handle;
value = *value_handle; value = *value_handle;
} }
...@@ -306,80 +301,6 @@ UNINITIALIZED_TEST(ConcurrentWriteBarrier) { ...@@ -306,80 +301,6 @@ UNINITIALIZED_TEST(ConcurrentWriteBarrier) {
thread->Join(); thread->Join();
CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value)); CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value));
heap::InvokeMarkSweep(i_isolate);
isolate->Dispose();
}
// Test helper thread: attaches a LocalHeap to heap_ and retargets every
// embedded-object slot in code_ to value_, driving reloc-slot recording
// through the write barrier from a non-main thread.
class ConcurrentRecordRelocSlotThread final : public v8::base::Thread {
public:
explicit ConcurrentRecordRelocSlotThread(Heap* heap, Code code,
HeapObject value)
: v8::base::Thread(base::Thread::Options("ThreadWithLocalHeap")),
heap_(heap),
code_(code),
value_(value) {}
void Run() override {
// LocalHeap registers this thread with the heap for the duration of Run.
LocalHeap local_heap(heap_);
// Visit only embedded-object reloc entries; each set_target_object call
// goes through the (concurrent) write barrier.
int mode_mask = RelocInfo::EmbeddedObjectModeMask();
for (RelocIterator it(code_, mode_mask); !it.done(); it.next()) {
DCHECK(RelocInfo::IsEmbeddedObjectMode(it.rinfo()->rmode()));
it.rinfo()->set_target_object(heap_, value_);
}
}
// Non-owning; the test keeps heap_, code_, and value_ alive across Join().
Heap* heap_;
Code code_;
HeapObject value_;
};
UNINITIALIZED_TEST(ConcurrentRecordRelocSlot) {
FLAG_manual_evacuation_candidates_selection = true;
ManualGCScope manual_gc_scope;
FLAG_concurrent_allocation = true;
FLAG_local_heaps = true;
v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
v8::Isolate* isolate = v8::Isolate::New(create_params);
Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
Heap* heap = i_isolate->heap();
Code code;
HeapObject value;
{
HandleScope handle_scope(i_isolate);
i::byte buffer[i::Assembler::kDefaultBufferSize];
MacroAssembler masm(i_isolate, v8::internal::CodeObjectRequired::kYes,
ExternalAssemblerBuffer(buffer, sizeof(buffer)));
masm.Push(ReadOnlyRoots(heap).undefined_value_handle());
CodeDesc desc;
masm.GetCode(i_isolate, &desc);
Handle<Code> code_handle =
Factory::CodeBuilder(i_isolate, desc, CodeKind::STUB).Build();
heap::AbandonCurrentlyFreeMemory(heap->old_space());
Handle<HeapNumber> value_handle(
i_isolate->factory()->NewHeapNumber<AllocationType::kOld>(1.1));
heap::ForceEvacuationCandidate(Page::FromHeapObject(*value_handle));
code = *code_handle;
value = *value_handle;
}
heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
i::GarbageCollectionReason::kTesting);
CHECK(heap->incremental_marking()->marking_state()->IsWhite(value));
{
CodeSpaceMemoryModificationScope modification_scope(heap);
auto thread =
std::make_unique<ConcurrentRecordRelocSlotThread>(heap, code, value);
CHECK(thread->Start());
thread->Join();
}
CHECK(heap->incremental_marking()->marking_state()->IsBlackOrGrey(value));
heap::InvokeMarkSweep(i_isolate);
isolate->Dispose(); isolate->Dispose();
} }
......
...@@ -455,7 +455,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectDiesOnMarkSweep) { ...@@ -455,7 +455,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectDiesOnMarkSweep) {
CcTest::InitializeVM(); CcTest::InitializeVM();
TracedGlobalTest( TracedGlobalTest(
CcTest::isolate(), ConstructJSObject, CcTest::isolate(), ConstructJSObject,
[](const TracedGlobal<v8::Object>& global) {}, [] { InvokeMarkSweep(); }, [](const TracedGlobal<v8::Object>& global) {}, InvokeMarkSweep,
SurvivalMode::kDies); SurvivalMode::kDies);
} }
...@@ -469,7 +469,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectSurvivesMarkSweepWhenHeldAliveOtherwise) { ...@@ -469,7 +469,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectSurvivesMarkSweepWhenHeldAliveOtherwise) {
v8::HandleScope scope(isolate); v8::HandleScope scope(isolate);
strong_global = v8::Global<v8::Object>(isolate, global.Get(isolate)); strong_global = v8::Global<v8::Object>(isolate, global.Get(isolate));
}, },
[]() { InvokeMarkSweep(); }, SurvivalMode::kSurvives); InvokeMarkSweep, SurvivalMode::kSurvives);
} }
TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavenge) { TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavenge) {
...@@ -478,7 +478,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavenge) { ...@@ -478,7 +478,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavenge) {
CcTest::InitializeVM(); CcTest::InitializeVM();
TracedGlobalTest( TracedGlobalTest(
CcTest::isolate(), ConstructJSObject, CcTest::isolate(), ConstructJSObject,
[](const TracedGlobal<v8::Object>& global) {}, []() { InvokeScavenge(); }, [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
SurvivalMode::kSurvives); SurvivalMode::kSurvives);
} }
...@@ -492,7 +492,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavengeWhenExcludedFromRoots) { ...@@ -492,7 +492,7 @@ TEST(TracedGlobalToUnmodifiedJSObjectSurvivesScavengeWhenExcludedFromRoots) {
tracer.ConsiderTracedGlobalAsRoot(false); tracer.ConsiderTracedGlobalAsRoot(false);
TracedGlobalTest( TracedGlobalTest(
CcTest::isolate(), ConstructJSObject, CcTest::isolate(), ConstructJSObject,
[](const TracedGlobal<v8::Object>& global) {}, []() { InvokeScavenge(); }, [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
SurvivalMode::kSurvives); SurvivalMode::kSurvives);
} }
...@@ -506,7 +506,7 @@ TEST(TracedGlobalToUnmodifiedJSApiObjectSurvivesScavengePerDefault) { ...@@ -506,7 +506,7 @@ TEST(TracedGlobalToUnmodifiedJSApiObjectSurvivesScavengePerDefault) {
tracer.ConsiderTracedGlobalAsRoot(true); tracer.ConsiderTracedGlobalAsRoot(true);
TracedGlobalTest( TracedGlobalTest(
CcTest::isolate(), ConstructJSApiObject<TracedGlobal<v8::Object>>, CcTest::isolate(), ConstructJSApiObject<TracedGlobal<v8::Object>>,
[](const TracedGlobal<v8::Object>& global) {}, []() { InvokeScavenge(); }, [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
SurvivalMode::kSurvives); SurvivalMode::kSurvives);
} }
...@@ -520,7 +520,7 @@ TEST(TracedGlobalToUnmodifiedJSApiObjectDiesOnScavengeWhenExcludedFromRoots) { ...@@ -520,7 +520,7 @@ TEST(TracedGlobalToUnmodifiedJSApiObjectDiesOnScavengeWhenExcludedFromRoots) {
tracer.ConsiderTracedGlobalAsRoot(false); tracer.ConsiderTracedGlobalAsRoot(false);
TracedGlobalTest( TracedGlobalTest(
CcTest::isolate(), ConstructJSApiObject<TracedGlobal<v8::Object>>, CcTest::isolate(), ConstructJSApiObject<TracedGlobal<v8::Object>>,
[](const TracedGlobal<v8::Object>& global) {}, []() { InvokeScavenge(); }, [](const TracedGlobal<v8::Object>& global) {}, InvokeScavenge,
SurvivalMode::kDies); SurvivalMode::kDies);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment