Commit 6ed44953 authored by Ulan Degenbaev, committed by Commit Bot

[heap] Consolidate marking write barrier logic in MarkingBarrier

This moves marking write barrier related functions from Heap and
IncrementalMarking into a separate class: MarkingBarrier.

Additionally, a new WriteBarrier class is added at the heap API level
that dispatches to MarkingBarrier.

Future CLs will move slots recording in MarkingBarrier and apply
the same refactoring to the generational barrier. An instance of
MarkingBarrier will be added to each LocalHeap and enable it to
emit a write barrier from a background thread.

Bug: v8:10315
Change-Id: Icc147b48563d88c85d99ead99b1e201f523721d0
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2280083
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#68703}
parent 7356376f
......@@ -2502,6 +2502,7 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/heap-controller.h",
"src/heap/heap-inl.h",
"src/heap/heap-write-barrier-inl.h",
"src/heap/heap-write-barrier.cc",
"src/heap/heap-write-barrier.h",
"src/heap/heap.cc",
"src/heap/heap.h",
......@@ -2525,6 +2526,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/mark-compact-inl.h",
"src/heap/mark-compact.cc",
"src/heap/mark-compact.h",
"src/heap/marking-barrier.cc",
"src/heap/marking-barrier.h",
"src/heap/marking-visitor-inl.h",
"src/heap/marking-visitor.h",
"src/heap/marking-worklist.cc",
......
......@@ -484,15 +484,12 @@ TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
BIND(&call_incremental_wb);
{
TNode<ExternalReference> function = ExternalConstant(
ExternalReference::incremental_marking_record_write_function());
TNode<ExternalReference> isolate_constant =
ExternalConstant(ExternalReference::isolate_address(isolate()));
ExternalReference::write_barrier_marking_from_code_function());
TNode<Smi> fp_mode = UncheckedCast<Smi>(Parameter(Descriptor::kFPMode));
TNode<IntPtrT> object =
BitcastTaggedToWord(Parameter(Descriptor::kObject));
CallCFunction3WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT,
ExternalReference>(
function, object, slot, isolate_constant, fp_mode, &exit);
CallCFunction2WithCallerSavedRegistersMode<Int32T, IntPtrT, IntPtrT>(
function, object, slot, fp_mode, &exit);
}
}
......
......@@ -16,7 +16,6 @@
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/elements.h"
#include "src/objects/ordered-hash-table.h"
// For IncrementalMarking::RecordWriteFromCode. TODO(jkummerow): Drop.
#include "src/execution/isolate.h"
#include "src/execution/microtask-queue.h"
#include "src/execution/simulator-base.h"
......@@ -214,8 +213,8 @@ struct IsValidExternalReferenceType<Result (Class::*)(Args...)> {
return ExternalReference(Redirect(FUNCTION_ADDR(Target), Type)); \
}
FUNCTION_REFERENCE(incremental_marking_record_write_function,
IncrementalMarking::RecordWriteFromCode)
FUNCTION_REFERENCE(write_barrier_marking_from_code_function,
WriteBarrier::MarkingFromCode)
FUNCTION_REFERENCE(insert_remembered_set_function,
Heap::InsertIntoRememberedSetFromCode)
......
......@@ -141,8 +141,6 @@ class StatsCounter;
V(ieee754_sinh_function, "base::ieee754::sinh") \
V(ieee754_tan_function, "base::ieee754::tan") \
V(ieee754_tanh_function, "base::ieee754::tanh") \
V(incremental_marking_record_write_function, \
"IncrementalMarking::RecordWrite") \
V(insert_remembered_set_function, "Heap::InsertIntoRememberedSetFromCode") \
V(invalidate_prototype_chains_function, \
"JSObject::InvalidatePrototypeChains()") \
......@@ -219,6 +217,7 @@ class StatsCounter;
V(wasm_memory_init, "wasm::memory_init") \
V(wasm_memory_copy, "wasm::memory_copy") \
V(wasm_memory_fill, "wasm::memory_fill") \
V(write_barrier_marking_from_code_function, "WriteBarrier::MarkingFromCode") \
V(call_enqueue_microtask_function, "MicrotaskQueue::CallEnqueueMicrotask") \
V(call_enter_context_function, "call_enter_context_function") \
V(atomic_pair_load_function, "atomic_pair_load_function") \
......
......@@ -369,7 +369,7 @@ void RelocInfo::set_target_address(Address target,
if (write_barrier_mode == UPDATE_WRITE_BARRIER && !host().is_null() &&
IsCodeTargetMode(rmode_) && !FLAG_disable_write_barriers) {
Code target_code = Code::GetCodeFromTargetAddress(target);
MarkingBarrierForCode(host(), this, target_code);
WriteBarrier::Marking(host(), this, target_code);
}
}
......
......@@ -26,22 +26,11 @@ V8_EXPORT_PRIVATE bool Heap_PageFlagsAreConsistent(HeapObject object);
V8_EXPORT_PRIVATE void Heap_GenerationalBarrierSlow(HeapObject object,
Address slot,
HeapObject value);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierSlow(HeapObject object, Address slot,
HeapObject value);
V8_EXPORT_PRIVATE void Heap_WriteBarrierForCodeSlow(Code host);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension);
V8_EXPORT_PRIVATE void Heap_GenerationalBarrierForCodeSlow(Code host,
RelocInfo* rinfo,
HeapObject object);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForCodeSlow(Code host,
RelocInfo* rinfo,
HeapObject object);
V8_EXPORT_PRIVATE void Heap_MarkingBarrierForDescriptorArraySlow(
Heap* heap, HeapObject host, HeapObject descriptor_array,
int number_of_own_descriptors);
V8_EXPORT_PRIVATE void Heap_GenerationalEphemeronKeyBarrierSlow(
Heap* heap, EphemeronHashTable table, Address slot);
......@@ -121,17 +110,6 @@ inline void GenerationalEphemeronKeyBarrierInternal(EphemeronHashTable table,
Heap_GenerationalEphemeronKeyBarrierSlow(table_chunk->GetHeap(), table, slot);
}
inline void MarkingBarrierInternal(HeapObject object, Address slot,
HeapObject value) {
DCHECK(Heap_PageFlagsAreConsistent(object));
heap_internals::MemoryChunk* value_chunk =
heap_internals::MemoryChunk::FromHeapObject(value);
if (!value_chunk->IsMarking()) return;
Heap_MarkingBarrierSlow(object, slot, value);
}
} // namespace heap_internals
inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object value) {
......@@ -142,21 +120,13 @@ inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, Object value) {
inline void WriteBarrierForCode(Code host, RelocInfo* rinfo, HeapObject value) {
GenerationalBarrierForCode(host, rinfo, value);
MarkingBarrierForCode(host, rinfo, value);
WriteBarrier::Marking(host, rinfo, value);
}
inline void WriteBarrierForCode(Code host) {
Heap_WriteBarrierForCodeSlow(host);
}
inline void MarkingBarrierForArrayBufferExtension(
HeapObject object, ArrayBufferExtension* extension) {
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
if (!extension || !object_chunk->IsMarking()) return;
Heap_MarkingBarrierForArrayBufferExtensionSlow(object, extension);
}
inline void GenerationalBarrier(HeapObject object, ObjectSlot slot,
Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
......@@ -201,52 +171,6 @@ inline void GenerationalBarrierForCode(Code host, RelocInfo* rinfo,
Heap_GenerationalBarrierForCodeSlow(host, rinfo, object);
}
inline void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK(!HasWeakHeapObjectTag(value));
if (!value.IsHeapObject()) return;
MarkingBarrier(object, slot, HeapObject::cast(value));
}
inline void MarkingBarrier(HeapObject object, ObjectSlot slot,
HeapObject value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK_IMPLIES(slot.address() != kNullAddress, !HasWeakHeapObjectTag(*slot));
heap_internals::MarkingBarrierInternal(object, slot.address(),
HeapObject::cast(value));
}
inline void MarkingBarrier(HeapObject object, MaybeObjectSlot slot,
MaybeObject value) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
HeapObject value_heap_object;
if (!value->GetHeapObject(&value_heap_object)) return;
heap_internals::MarkingBarrierInternal(object, slot.address(),
value_heap_object);
}
inline void MarkingBarrierForCode(Code host, RelocInfo* rinfo,
HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
DCHECK(!HasWeakHeapObjectTag(object));
heap_internals::MemoryChunk* object_chunk =
heap_internals::MemoryChunk::FromHeapObject(object);
if (!object_chunk->IsMarking()) return;
Heap_MarkingBarrierForCodeSlow(host, rinfo, object);
}
inline void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
HeapObject descriptor_array,
int number_of_own_descriptors) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return;
heap_internals::MemoryChunk* chunk =
heap_internals::MemoryChunk::FromHeapObject(descriptor_array);
if (!chunk->IsMarking()) return;
Heap_MarkingBarrierForDescriptorArraySlow(heap, host, descriptor_array,
number_of_own_descriptors);
}
inline WriteBarrierMode GetWriteBarrierModeForObject(
HeapObject object, const DisallowHeapAllocation* promise) {
if (FLAG_disable_write_barriers) return SKIP_WRITE_BARRIER;
......@@ -273,6 +197,55 @@ inline bool IsReadOnlyHeapObject(HeapObject object) {
return chunk->InReadOnlySpace();
}
// Returns the heap of |object| only if incremental marking is active on the
// chunk containing |object|; otherwise returns an empty optional. Always
// empty for third-party-heap builds.
base::Optional<Heap*> WriteBarrier::GetHeapIfMarking(HeapObject object) {
  if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) return {};
  heap_internals::MemoryChunk* chunk =
      heap_internals::MemoryChunk::FromHeapObject(object);
  if (!chunk->IsMarking()) return {};
  return chunk->GetHeap();
}
// Marking barrier for a strong Object slot write. Smis are filtered out
// here; heap-object values are forwarded to the HeapObjectSlot overload.
void WriteBarrier::Marking(HeapObject host, ObjectSlot slot, Object value) {
  DCHECK(!HasWeakHeapObjectTag(value));
  if (!value.IsHeapObject()) return;
  Marking(host, HeapObjectSlot(slot), HeapObject::cast(value));
}
// Marking barrier for a MaybeObject slot write. Smis and cleared weak
// references are filtered out; weak references to live heap objects are
// treated as strong for marking purposes.
void WriteBarrier::Marking(HeapObject host, MaybeObjectSlot slot,
                           MaybeObject value) {
  HeapObject value_heap_object;
  if (!value->GetHeapObject(&value_heap_object)) return;
  Marking(host, HeapObjectSlot(slot), value_heap_object);
}
// Common fast path: only take the slow path when marking is active on the
// host's chunk (see GetHeapIfMarking).
void WriteBarrier::Marking(HeapObject host, HeapObjectSlot slot,
                           HeapObject value) {
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, slot, value);
}
// Marking barrier for a write of |value| into a Code object's relocation
// info. No-op unless marking is active on the host's chunk.
void WriteBarrier::Marking(Code host, RelocInfo* reloc_info, HeapObject value) {
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, reloc_info, value);
}
// Marking barrier for attaching an ArrayBufferExtension to a JSArrayBuffer.
// No-op when |extension| is null or marking is not active on the host.
void WriteBarrier::Marking(JSArrayBuffer host,
                           ArrayBufferExtension* extension) {
  if (!extension) return;
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, extension);
}
// Marking barrier for a Map whose DescriptorArray may need (re-)marking up
// to |number_of_own_descriptors|. No-op unless marking is active.
void WriteBarrier::Marking(Map host, DescriptorArray descriptor_array,
                           int number_of_own_descriptors) {
  auto heap = GetHeapIfMarking(host);
  if (!heap) return;
  MarkingSlow(*heap, host, descriptor_array, number_of_own_descriptors);
}
} // namespace internal
} // namespace v8
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/heap-write-barrier.h"
#include "src/heap/heap-write-barrier-inl.h"
#include "src/heap/marking-barrier.h"
#include "src/objects/descriptor-array.h"
#include "src/objects/maybe-object.h"
#include "src/objects/slots-inl.h"
#include "src/objects/slots.h"
namespace v8 {
namespace internal {
// Slow path for slot writes: delegates to the heap's MarkingBarrier.
void WriteBarrier::MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot slot,
                               HeapObject value) {
  heap->marking_barrier()->Write(host, slot, value);
}
// Slow path for Code reloc-info writes: delegates to the MarkingBarrier.
void WriteBarrier::MarkingSlow(Heap* heap, Code host, RelocInfo* reloc_info,
                               HeapObject value) {
  heap->marking_barrier()->Write(host, reloc_info, value);
}
// Slow path for ArrayBufferExtension writes: delegates to the MarkingBarrier.
void WriteBarrier::MarkingSlow(Heap* heap, JSArrayBuffer host,
                               ArrayBufferExtension* extension) {
  heap->marking_barrier()->Write(host, extension);
}
// Slow path for DescriptorArray marking: delegates to the MarkingBarrier.
void WriteBarrier::MarkingSlow(Heap* heap, Map host,
                               DescriptorArray descriptor_array,
                               int number_of_own_descriptors) {
  heap->marking_barrier()->Write(host, descriptor_array,
                                 number_of_own_descriptors);
}
// Entry point used by generated code (via the
// write_barrier_marking_from_code_function ExternalReference); takes raw
// addresses because the code stub cannot pass tagged handles.
int WriteBarrier::MarkingFromCode(Address raw_host, Address raw_slot) {
  HeapObject host = HeapObject::cast(Object(raw_host));
  MaybeObjectSlot slot(raw_slot);
  WriteBarrier::Marking(host, slot, *slot);
  // Called by RecordWriteCodeStubAssembler, which doesn't accept void type.
  return 0;
}
} // namespace internal
} // namespace v8
......@@ -6,6 +6,7 @@
#define V8_HEAP_HEAP_WRITE_BARRIER_H_
#include "include/v8-internal.h"
#include "src/base/optional.h"
#include "src/common/globals.h"
namespace v8 {
......@@ -13,10 +14,14 @@ namespace internal {
class ArrayBufferExtension;
class Code;
class DescriptorArray;
class EphemeronHashTable;
class FixedArray;
class Heap;
class JSArrayBuffer;
class Map;
class MarkCompactCollector;
class RelocInfo;
class EphemeronHashTable;
// Note: In general it is preferred to use the macros defined in
// object-macros.h.
......@@ -35,20 +40,31 @@ void GenerationalEphemeronKeyBarrier(EphemeronHashTable table, ObjectSlot slot,
Object value);
void GenerationalBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
// Marking write barrier.
void MarkingBarrier(HeapObject object, ObjectSlot slot, Object value);
void MarkingBarrier(HeapObject object, ObjectSlot slot, HeapObject value);
void MarkingBarrier(HeapObject object, MaybeObjectSlot slot, MaybeObject value);
void MarkingBarrierForCode(Code host, RelocInfo* rinfo, HeapObject object);
void MarkingBarrierForArrayBufferExtension(HeapObject object,
ArrayBufferExtension* extension);
inline bool IsReadOnlyHeapObject(HeapObject object);
void MarkingBarrierForDescriptorArray(Heap* heap, HeapObject host,
HeapObject descriptor_array,
class V8_EXPORT_PRIVATE WriteBarrier {
public:
static inline void Marking(HeapObject host, ObjectSlot, Object value);
static inline void Marking(HeapObject host, HeapObjectSlot, HeapObject value);
static inline void Marking(HeapObject host, MaybeObjectSlot,
MaybeObject value);
static inline void Marking(Code host, RelocInfo*, HeapObject value);
static inline void Marking(JSArrayBuffer host, ArrayBufferExtension*);
static inline void Marking(Map host, DescriptorArray,
int number_of_own_descriptors);
// It is invoked from generated code and has to take raw addresses.
static int MarkingFromCode(Address raw_host, Address raw_slot);
inline bool IsReadOnlyHeapObject(HeapObject object);
private:
static void MarkingSlow(Heap* heap, HeapObject host, HeapObjectSlot,
HeapObject value);
static void MarkingSlow(Heap* heap, Code host, RelocInfo*, HeapObject value);
static void MarkingSlow(Heap* heap, JSArrayBuffer host,
ArrayBufferExtension*);
static void MarkingSlow(Heap* heap, Map host, DescriptorArray,
int number_of_own_descriptors);
static inline base::Optional<Heap*> GetHeapIfMarking(HeapObject object);
};
} // namespace internal
} // namespace v8
......
......@@ -48,6 +48,8 @@
#include "src/heap/local-heap.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-barrier-inl.h"
#include "src/heap/marking-barrier.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/memory-measurement.h"
#include "src/heap/memory-reducer.h"
......@@ -111,37 +113,15 @@ void Heap_GenerationalBarrierSlow(HeapObject object, Address slot,
Heap::GenerationalBarrierSlow(object, slot, value);
}
void Heap_MarkingBarrierSlow(HeapObject object, Address slot,
HeapObject value) {
Heap::MarkingBarrierSlow(object, slot, value);
}
void Heap_WriteBarrierForCodeSlow(Code host) {
Heap::WriteBarrierForCodeSlow(host);
}
void Heap_MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension) {
Heap::MarkingBarrierForArrayBufferExtensionSlow(object, extension);
}
void Heap_GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
HeapObject object) {
Heap::GenerationalBarrierForCodeSlow(host, rinfo, object);
}
void Heap_MarkingBarrierForCodeSlow(Code host, RelocInfo* rinfo,
HeapObject object) {
Heap::MarkingBarrierForCodeSlow(host, rinfo, object);
}
void Heap_MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
HeapObject descriptor_array,
int number_of_own_descriptors) {
Heap::MarkingBarrierForDescriptorArraySlow(heap, host, descriptor_array,
number_of_own_descriptors);
}
void Heap_GenerationalEphemeronKeyBarrierSlow(Heap* heap,
EphemeronHashTable table,
Address slot) {
......@@ -5306,6 +5286,9 @@ void Heap::SetUp() {
concurrent_marking_.reset(new ConcurrentMarking(this, nullptr, nullptr));
}
marking_barrier_.reset(new MarkingBarrier(this, mark_compact_collector(),
incremental_marking()));
for (int i = FIRST_SPACE; i <= LAST_SPACE; i++) {
space_[i] = nullptr;
}
......@@ -6636,20 +6619,10 @@ void Heap::WriteBarrierForCodeSlow(Code code) {
for (RelocIterator it(code, RelocInfo::EmbeddedObjectModeMask()); !it.done();
it.next()) {
GenerationalBarrierForCode(code, it.rinfo(), it.rinfo()->target_object());
MarkingBarrierForCode(code, it.rinfo(), it.rinfo()->target_object());
WriteBarrier::Marking(code, it.rinfo(), it.rinfo()->target_object());
}
}
void Heap::MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension) {
if (V8_CONCURRENT_MARKING_BOOL || GetIsolateFromWritableObject(object)
->heap()
->incremental_marking()
->marking_state()
->IsBlack(object))
extension->Mark();
}
void Heap::GenerationalBarrierSlow(HeapObject object, Address slot,
HeapObject value) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
......@@ -6676,8 +6649,7 @@ void Heap::EphemeronKeyWriteBarrierFromCode(Address raw_object,
if (!ObjectInYoungGeneration(table) && ObjectInYoungGeneration(key)) {
isolate->heap()->RecordEphemeronKeyWrite(table, key_slot_address);
}
isolate->heap()->incremental_marking()->RecordWrite(table, key_slot,
maybe_key);
WriteBarrier::Marking(table, key_slot, maybe_key);
}
enum RangeWriteBarrierMode {
......@@ -6695,7 +6667,7 @@ void Heap::WriteBarrierForRangeImpl(MemoryChunk* source_page, HeapObject object,
STATIC_ASSERT(!(kModeMask & kDoEvacuationSlotRecording) ||
(kModeMask & kDoMarking));
IncrementalMarking* incremental_marking = this->incremental_marking();
MarkingBarrier* marking_barrier = this->marking_barrier();
MarkCompactCollector* collector = this->mark_compact_collector();
for (TSlot slot = start_slot; slot < end_slot; ++slot) {
......@@ -6710,7 +6682,7 @@ void Heap::WriteBarrierForRangeImpl(MemoryChunk* source_page, HeapObject object,
}
if ((kModeMask & kDoMarking) &&
incremental_marking->BaseRecordWrite(object, value_heap_object)) {
marking_barrier->MarkValue(object, value_heap_object)) {
if (kModeMask & kDoEvacuationSlotRecording) {
collector->RecordSlot(source_page, HeapObjectSlot(slot),
value_heap_object);
......@@ -6801,35 +6773,6 @@ void Heap::GenerationalBarrierForCodeSlow(Code host, RelocInfo* rinfo,
static_cast<uint32_t>(offset));
}
void Heap::MarkingBarrierSlow(HeapObject object, Address slot,
HeapObject value) {
Heap* heap = Heap::FromWritableHeapObject(object);
heap->incremental_marking()->RecordWriteSlow(object, HeapObjectSlot(slot),
value);
}
void Heap::MarkingBarrierForCodeSlow(Code host, RelocInfo* rinfo,
HeapObject object) {
Heap* heap = Heap::FromWritableHeapObject(host);
DCHECK(heap->incremental_marking()->IsMarking());
heap->incremental_marking()->RecordWriteIntoCode(host, rinfo, object);
}
void Heap::MarkingBarrierForDescriptorArraySlow(Heap* heap, HeapObject host,
HeapObject raw_descriptor_array,
int number_of_own_descriptors) {
DCHECK(heap->incremental_marking()->IsMarking());
DescriptorArray descriptor_array =
DescriptorArray::cast(raw_descriptor_array);
int16_t raw_marked = descriptor_array.raw_number_of_marked_descriptors();
if (NumberOfMarkedDescriptors::decode(heap->mark_compact_collector()->epoch(),
raw_marked) <
number_of_own_descriptors) {
heap->mark_compact_collector()->MarkDescriptorArrayFromWriteBarrier(
host, descriptor_array, number_of_own_descriptors);
}
}
bool Heap::PageFlagsAreConsistent(HeapObject object) {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return true;
......
......@@ -77,6 +77,7 @@ class Isolate;
class JSFinalizationRegistry;
class LocalEmbedderHeapTracer;
class LocalHeap;
class MarkingBarrier;
class MemoryAllocator;
class MemoryChunk;
class MemoryMeasurement;
......@@ -419,9 +420,6 @@ class Heap {
V8_EXPORT_PRIVATE static void WriteBarrierForCodeSlow(Code host);
V8_EXPORT_PRIVATE static void MarkingBarrierForArrayBufferExtensionSlow(
HeapObject object, ArrayBufferExtension* extension);
V8_EXPORT_PRIVATE static void GenerationalBarrierSlow(HeapObject object,
Address slot,
HeapObject value);
......@@ -431,19 +429,6 @@ class Heap {
Address raw_object, Address address, Isolate* isolate);
V8_EXPORT_PRIVATE static void GenerationalBarrierForCodeSlow(
Code host, RelocInfo* rinfo, HeapObject value);
V8_EXPORT_PRIVATE static void MarkingBarrierSlow(HeapObject object,
Address slot,
HeapObject value);
V8_EXPORT_PRIVATE static void MarkingBarrierForCodeSlow(Code host,
RelocInfo* rinfo,
HeapObject value);
static void MarkingBarrierForArrayBufferExtension(
JSArrayBuffer object, ArrayBufferExtension* extension);
V8_EXPORT_PRIVATE static void MarkingBarrierForDescriptorArraySlow(
Heap* heap, HeapObject host, HeapObject descriptor_array,
int number_of_own_descriptors);
V8_EXPORT_PRIVATE static bool PageFlagsAreConsistent(HeapObject object);
// Notifies the heap that is ok to start marking or other activities that
......@@ -1014,6 +999,8 @@ class Heap {
return incremental_marking_.get();
}
MarkingBarrier* marking_barrier() { return marking_barrier_.get(); }
// ===========================================================================
// Concurrent marking API. ===================================================
// ===========================================================================
......@@ -2180,6 +2167,7 @@ class Heap {
std::unique_ptr<ScavengeJob> scavenge_job_;
std::unique_ptr<AllocationObserver> scavenge_task_observer_;
std::unique_ptr<LocalEmbedderHeapTracer> local_embedder_heap_tracer_;
std::unique_ptr<MarkingBarrier> marking_barrier_;
StrongRootsList* strong_roots_list_ = nullptr;
// This counter is increased before each GC and never reset.
......
......@@ -33,37 +33,6 @@ void IncrementalMarking::TransferColor(HeapObject from, HeapObject to) {
}
}
bool IncrementalMarking::BaseRecordWrite(HeapObject obj, HeapObject value) {
DCHECK(!marking_state()->IsImpossible(value));
DCHECK(!marking_state()->IsImpossible(obj));
// The write barrier stub generated with V8_CONCURRENT_MARKING does not
// check the color of the source object.
const bool need_recording =
V8_CONCURRENT_MARKING_BOOL || marking_state()->IsBlack(obj);
if (need_recording && WhiteToGreyAndPush(value)) {
RestartIfNotMarking();
}
return is_compacting_ && need_recording;
}
template <typename TSlot>
void IncrementalMarking::RecordWrite(HeapObject obj, TSlot slot,
typename TSlot::TObject value) {
static_assert(std::is_same<TSlot, ObjectSlot>::value ||
std::is_same<TSlot, MaybeObjectSlot>::value,
"Only ObjectSlot and MaybeObjectSlot are expected here");
DCHECK_NE(slot.address(), kNullAddress);
DCHECK_IMPLIES(!TSlot::kCanBeWeak, !HAS_WEAK_HEAP_OBJECT_TAG((*slot).ptr()));
DCHECK_IMPLIES(!TSlot::kCanBeWeak, !HAS_WEAK_HEAP_OBJECT_TAG(value.ptr()));
// When writing a weak reference, treat it as strong for the purposes of the
// marking barrier.
HeapObject value_heap_object;
if (IsMarking() && value.GetHeapObject(&value_heap_object)) {
RecordWriteSlow(obj, HeapObjectSlot(slot), value_heap_object);
}
}
bool IncrementalMarking::WhiteToGreyAndPush(HeapObject obj) {
if (marking_state()->WhiteToGrey(obj)) {
marking_worklists()->Push(obj);
......
......@@ -14,6 +14,7 @@
#include "src/heap/heap-inl.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/marking-barrier.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/memory-chunk.h"
......@@ -58,33 +59,6 @@ IncrementalMarking::IncrementalMarking(Heap* heap,
SetState(STOPPED);
}
void IncrementalMarking::RecordWriteSlow(HeapObject obj, HeapObjectSlot slot,
HeapObject value) {
if (BaseRecordWrite(obj, value) && slot.address() != kNullAddress) {
// Object is not going to be rescanned we need to record the slot.
collector_->RecordSlot(obj, slot, value);
}
}
int IncrementalMarking::RecordWriteFromCode(Address raw_obj,
Address slot_address,
Isolate* isolate) {
HeapObject obj = HeapObject::cast(Object(raw_obj));
MaybeObjectSlot slot(slot_address);
isolate->heap()->incremental_marking()->RecordWrite(obj, slot, *slot);
// Called by RecordWriteCodeStubAssembler, which doesnt accept void type
return 0;
}
void IncrementalMarking::RecordWriteIntoCode(Code host, RelocInfo* rinfo,
HeapObject value) {
DCHECK(IsMarking());
if (BaseRecordWrite(host, value)) {
// Object is not going to be rescanned. We need to record the slot.
collector_->RecordRelocSlot(host, rinfo, value);
}
}
void IncrementalMarking::MarkBlackAndVisitObjectDueToLayoutChange(
HeapObject obj) {
TRACE_EVENT0("v8", "V8.GCIncrementalMarkingLayoutChange");
......@@ -155,77 +129,6 @@ class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
Heap* heap_;
};
void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
PagedSpace* space) {
for (Page* p : *space) {
p->SetOldGenerationPageFlags(false);
}
}
void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
NewSpace* space) {
for (Page* p : *space) {
p->SetYoungGenerationPageFlags(false);
}
}
void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
for (LargePage* p : *heap_->new_lo_space()) {
p->SetYoungGenerationPageFlags(false);
DCHECK(p->IsLargePage());
}
for (LargePage* p : *heap_->lo_space()) {
p->SetOldGenerationPageFlags(false);
}
for (LargePage* p : *heap_->code_lo_space()) {
p->SetOldGenerationPageFlags(false);
}
}
void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
for (Page* p : *space) {
p->SetOldGenerationPageFlags(true);
}
}
void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
for (Page* p : *space) {
p->SetYoungGenerationPageFlags(true);
}
}
void IncrementalMarking::ActivateIncrementalWriteBarrier() {
ActivateIncrementalWriteBarrier(heap_->old_space());
ActivateIncrementalWriteBarrier(heap_->map_space());
ActivateIncrementalWriteBarrier(heap_->code_space());
ActivateIncrementalWriteBarrier(heap_->new_space());
for (LargePage* p : *heap_->new_lo_space()) {
p->SetYoungGenerationPageFlags(true);
DCHECK(p->IsLargePage());
}
for (LargePage* p : *heap_->lo_space()) {
p->SetOldGenerationPageFlags(true);
}
for (LargePage* p : *heap_->code_lo_space()) {
p->SetOldGenerationPageFlags(true);
}
}
bool IncrementalMarking::WasActivated() { return was_activated_; }
......@@ -244,10 +147,6 @@ bool IncrementalMarking::IsBelowActivationThresholds() const {
heap_->GlobalSizeOfObjects() <= kGlobalActivationThreshold;
}
void IncrementalMarking::Deactivate() {
DeactivateIncrementalWriteBarrier();
}
void IncrementalMarking::Start(GarbageCollectionReason gc_reason) {
if (FLAG_trace_incremental_marking) {
const size_t old_generation_size_mb =
......@@ -338,7 +237,7 @@ void IncrementalMarking::StartMarking() {
SetState(MARKING);
ActivateIncrementalWriteBarrier();
heap_->marking_barrier()->Activate(is_compacting_);
heap_->isolate()->compilation_cache()->MarkCompactPrologue();
......
......@@ -180,25 +180,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
V8_INLINE void RestartIfNotMarking();
// {raw_obj} and {slot_address} are raw Address values instead of a
// HeapObject and a MaybeObjectSlot because this is called from
// generated code via ExternalReference.
static int RecordWriteFromCode(Address raw_obj, Address slot_address,
Isolate* isolate);
// Record a slot for compaction. Returns false for objects that are
// guaranteed to be rescanned or not guaranteed to survive.
//
// No slots in white objects should be recorded, as some slots are typed and
// cannot be interpreted correctly if the underlying object does not survive
// the incremental cycle (stays white).
V8_INLINE bool BaseRecordWrite(HeapObject obj, HeapObject value);
template <typename TSlot>
V8_INLINE void RecordWrite(HeapObject obj, TSlot slot,
typename TSlot::TObject value);
void RecordWriteSlow(HeapObject obj, HeapObjectSlot slot, HeapObject value);
void RecordWriteIntoCode(Code host, RelocInfo* rinfo, HeapObject value);
// Returns true if the function succeeds in transitioning the object
// from white to grey.
V8_INLINE bool WhiteToGreyAndPush(HeapObject obj);
......@@ -270,14 +251,6 @@ class V8_EXPORT_PRIVATE IncrementalMarking final {
// increase chances of reusing of map transition tree in future.
void RetainMaps();
void ActivateIncrementalWriteBarrier(PagedSpace* space);
void ActivateIncrementalWriteBarrier(NewSpace* space);
void ActivateIncrementalWriteBarrier();
void DeactivateIncrementalWriteBarrierForSpace(PagedSpace* space);
void DeactivateIncrementalWriteBarrierForSpace(NewSpace* space);
void DeactivateIncrementalWriteBarrier();
// Updates scheduled_bytes_to_mark_ to ensure marking progress based on
// time.
void ScheduleBytesToMarkBasedOnTime(double time_ms);
......
......@@ -24,6 +24,7 @@
#include "src/heap/large-spaces.h"
#include "src/heap/local-allocator-inl.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/marking-barrier.h"
#include "src/heap/marking-visitor-inl.h"
#include "src/heap/marking-visitor.h"
#include "src/heap/memory-measurement-inl.h"
......@@ -2065,7 +2066,7 @@ void MarkCompactCollector::MarkLiveObjects() {
}
if (was_marked_incrementally_) {
heap()->incremental_marking()->Deactivate();
heap()->marking_barrier()->Deactivate();
}
epoch_++;
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_MARKING_BARRIER_INL_H_
#define V8_HEAP_MARKING_BARRIER_INL_H_
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/marking-barrier.h"
namespace v8 {
namespace internal {
// Marks |value| in response to a write of it into |host| while the barrier
// is activated. Returns true when the caller is still responsible for
// recording the slot (when compacting); returns false when the marker will
// revisit |host| anyway and record the slot itself.
bool MarkingBarrier::MarkValue(HeapObject host, HeapObject value) {
  DCHECK(is_activated_);
  DCHECK(!marking_state_.IsImpossible(value));
  DCHECK(!marking_state_.IsImpossible(host));
  if (!V8_CONCURRENT_MARKING_BOOL && marking_state_.IsBlack(host)) {
    // The value will be marked and the slot will be recorded when the marker
    // visits the host object.
    return false;
  }
  if (WhiteToGreyAndPush(value)) {
    // |value| was newly discovered; make sure incremental marking is running
    // so it gets processed.
    incremental_marking_->RestartIfNotMarking();
  }
  return true;
}
// Transitions |obj| from white to grey and pushes it onto the collector's
// marking worklist. Returns true only if this call performed the transition
// (i.e. |obj| was white before).
bool MarkingBarrier::WhiteToGreyAndPush(HeapObject obj) {
  if (marking_state_.WhiteToGrey(obj)) {
    collector_->marking_worklists()->Push(obj);
    return true;
  }
  return false;
}
} // namespace internal
} // namespace v8
#endif // V8_HEAP_MARKING_BARRIER_INL_H_
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/marking-barrier.h"
#include "src/heap/heap-inl.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/marking-barrier-inl.h"
#include "src/objects/js-array-buffer.h"
namespace v8 {
namespace internal {
// A MarkingBarrier consolidates the marking write-barrier logic for |heap|:
// it marks values through |collector|'s worklists and notifies
// |incremental_marking| when marking has to be (re-)started.
MarkingBarrier::MarkingBarrier(Heap* heap, MarkCompactCollector* collector,
                               IncrementalMarking* incremental_marking)
    : heap_(heap),
      collector_(collector),
      incremental_marking_(incremental_marking) {}
// Slot write barrier: marks |value| and, when compacting and the slot
// address is non-null, records the slot for pointer updating.
void MarkingBarrier::Write(HeapObject host, HeapObjectSlot slot,
                           HeapObject value) {
  if (MarkValue(host, value)) {
    if (is_compacting_ && slot.address()) {
      collector_->RecordSlot(host, slot, value);
    }
  }
}
// Code write barrier: marks |value| and, when compacting, records the
// relocation slot in |host| for pointer updating.
void MarkingBarrier::Write(Code host, RelocInfo* reloc_info, HeapObject value) {
  if (MarkValue(host, value)) {
    if (is_compacting_) {
      collector_->RecordRelocSlot(host, reloc_info, value);
    }
  }
}
// ArrayBufferExtension write barrier: marks the extension eagerly, except
// when (without concurrent marking) the black host will be revisited anyway.
void MarkingBarrier::Write(JSArrayBuffer host,
                           ArrayBufferExtension* extension) {
  if (!V8_CONCURRENT_MARKING_BOOL && marking_state_.IsBlack(host)) {
    // The extension will be marked when the marker visits the host object.
    return;
  }
  extension->Mark();
}
// Marking barrier for descriptor arrays: ensures the first
// |number_of_own_descriptors| entries are marked for the current GC epoch.
void MarkingBarrier::Write(Map host, DescriptorArray descriptor_array,
                           int number_of_own_descriptors) {
  const int16_t raw_marked =
      descriptor_array.raw_number_of_marked_descriptors();
  // The marked-descriptor count is encoded together with the collector
  // epoch, so stale counts from a previous GC cycle decode to zero.
  const int marked_for_this_epoch =
      NumberOfMarkedDescriptors::decode(collector_->epoch(), raw_marked);
  if (marked_for_this_epoch < number_of_own_descriptors) {
    collector_->MarkDescriptorArrayFromWriteBarrier(host, descriptor_array,
                                                    number_of_own_descriptors);
  }
}
// Clears the old-generation barrier flags on every page of |space|.
void MarkingBarrier::Deactivate(PagedSpace* space) {
  for (Page* page : *space) {
    page->SetOldGenerationPageFlags(false);
  }
}
// Clears the young-generation barrier flags on every page of |space|.
void MarkingBarrier::Deactivate(NewSpace* space) {
  for (Page* page : *space) {
    page->SetYoungGenerationPageFlags(false);
  }
}
void MarkingBarrier::Deactivate() {
Deactivate(heap_->old_space());
Deactivate(heap_->map_space());
Deactivate(heap_->code_space());
Deactivate(heap_->new_space());
for (LargePage* p : *heap_->new_lo_space()) {
p->SetYoungGenerationPageFlags(false);
DCHECK(p->IsLargePage());
}
for (LargePage* p : *heap_->lo_space()) {
p->SetOldGenerationPageFlags(false);
}
for (LargePage* p : *heap_->code_lo_space()) {
p->SetOldGenerationPageFlags(false);
}
is_activated_ = false;
is_compacting_ = false;
}
// Sets the old-generation barrier flags on every page of |space|.
void MarkingBarrier::Activate(PagedSpace* space) {
  for (Page* page : *space) {
    page->SetOldGenerationPageFlags(true);
  }
}
// Sets the young-generation barrier flags on every page of |space|.
void MarkingBarrier::Activate(NewSpace* space) {
  for (Page* page : *space) {
    page->SetYoungGenerationPageFlags(true);
  }
}
void MarkingBarrier::Activate(bool is_compacting) {
DCHECK(!is_activated_);
is_compacting_ = is_compacting;
is_activated_ = true;
Activate(heap_->old_space());
Activate(heap_->map_space());
Activate(heap_->code_space());
Activate(heap_->new_space());
for (LargePage* p : *heap_->new_lo_space()) {
p->SetYoungGenerationPageFlags(true);
DCHECK(p->IsLargePage());
}
for (LargePage* p : *heap_->lo_space()) {
p->SetOldGenerationPageFlags(true);
}
for (LargePage* p : *heap_->code_lo_space()) {
p->SetOldGenerationPageFlags(true);
}
}
} // namespace internal
} // namespace v8
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_MARKING_BARRIER_H_
#define V8_HEAP_MARKING_BARRIER_H_
#include "include/v8-internal.h"
#include "src/common/globals.h"
#include "src/heap/mark-compact.h"
namespace v8 {
namespace internal {
class Heap;
class IncrementalMarking;
class PagedSpace;
class NewSpace;
// Consolidates the marking write barrier logic previously spread across
// Heap and IncrementalMarking. Dispatched to by the heap-level WriteBarrier
// class. Marks values written into live objects and, when compacting,
// records the written slots for pointer updating after evacuation.
class MarkingBarrier {
 public:
  MarkingBarrier(Heap*, MarkCompactCollector*, IncrementalMarking*);
  // Called when incremental marking starts/stops; toggles the page flags
  // that make the generated-code and runtime barriers fire.
  void Activate(bool is_compacting);
  void Deactivate();
  // Barrier entry points for the different kinds of writes.
  void Write(HeapObject host, HeapObjectSlot, HeapObject value);
  void Write(Code host, RelocInfo*, HeapObject value);
  void Write(JSArrayBuffer host, ArrayBufferExtension*);
  void Write(Map host, DescriptorArray, int number_of_own_descriptors);
  // Returns true if the slot needs to be recorded.
  inline bool MarkValue(HeapObject host, HeapObject value);
 private:
  using MarkingState = MarkCompactCollector::MarkingState;
  // Marks |value| grey and pushes it on the marking worklist; returns true
  // if the object was white.
  inline bool WhiteToGreyAndPush(HeapObject value);
  // Per-space helpers used by Activate(bool)/Deactivate().
  void Activate(PagedSpace*);
  void Activate(NewSpace*);
  void Deactivate(PagedSpace*);
  void Deactivate(NewSpace*);
  MarkingState marking_state_;
  Heap* heap_;
  MarkCompactCollector* collector_;
  IncrementalMarking* incremental_marking_;
  // True while compaction is enabled for the current marking cycle.
  bool is_compacting_ = false;
  // True between Activate() and Deactivate().
  bool is_activated_ = false;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_MARKING_BARRIER_H_
......@@ -103,7 +103,7 @@ void JSArrayBuffer::set_extension(ArrayBufferExtension* extension) {
#else
base::AsAtomicPointer::Release_Store(extension_location(), extension);
#endif
MarkingBarrierForArrayBufferExtension(*this, extension);
WriteBarrier::Marking(*this, extension);
} else {
CHECK_EQ(extension, nullptr);
}
......
......@@ -549,7 +549,7 @@ void JSFunction::set_code(Code value) {
DCHECK(!ObjectInYoungGeneration(value));
RELAXED_WRITE_FIELD(*this, kCodeOffset, value);
#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrier(*this, RawField(kCodeOffset), value);
WriteBarrier::Marking(*this, RawField(kCodeOffset), value);
#endif
}
......
......@@ -697,8 +697,7 @@ void Map::AppendDescriptor(Isolate* isolate, Descriptor* desc) {
descriptors.Append(desc);
SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
number_of_own_descriptors + 1);
WriteBarrier::Marking(*this, descriptors, number_of_own_descriptors + 1);
#endif
}
// Properly mark the map if the {desc} is an "interesting symbol".
......
......@@ -638,7 +638,7 @@ void Map::ReplaceDescriptors(Isolate* isolate, DescriptorArray new_descriptors,
// all its elements.
Map current = *this;
#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), current, to_replace,
WriteBarrier::Marking(current, to_replace,
to_replace.number_of_descriptors());
#endif
while (current.instance_descriptors(isolate) == to_replace) {
......@@ -1136,7 +1136,7 @@ void Map::EnsureDescriptorSlack(Isolate* isolate, Handle<Map> map, int slack) {
// descriptors will not be trimmed in the mark-compactor, we need to mark
// all its elements.
#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), *map, *descriptors,
WriteBarrier::Marking(*map, *descriptors,
descriptors->number_of_descriptors());
#endif
......@@ -2580,8 +2580,7 @@ void Map::SetInstanceDescriptors(Isolate* isolate, DescriptorArray descriptors,
set_synchronized_instance_descriptors(descriptors);
SetNumberOfOwnDescriptors(number_of_own_descriptors);
#ifndef V8_DISABLE_WRITE_BARRIERS
MarkingBarrierForDescriptorArray(isolate->heap(), *this, descriptors,
number_of_own_descriptors);
WriteBarrier::Marking(*this, descriptors, number_of_own_descriptors);
#endif
}
......
......@@ -314,7 +314,7 @@
#define WRITE_BARRIER(object, offset, value) \
do { \
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
MarkingBarrier(object, (object).RawField(offset), value); \
WriteBarrier::Marking(object, (object).RawField(offset), value); \
GenerationalBarrier(object, (object).RawField(offset), value); \
} while (false)
#endif
......@@ -325,7 +325,7 @@
#define WEAK_WRITE_BARRIER(object, offset, value) \
do { \
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
MarkingBarrier(object, (object).RawMaybeWeakField(offset), value); \
WriteBarrier::Marking(object, (object).RawMaybeWeakField(offset), value); \
GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
} while (false)
#endif
......@@ -340,7 +340,7 @@
do { \
DCHECK_NOT_NULL(GetHeapFromWritableObject(object)); \
EphemeronHashTable table = EphemeronHashTable::cast(object); \
MarkingBarrier(object, (object).RawField(offset), value); \
WriteBarrier::Marking(object, (object).RawField(offset), value); \
GenerationalEphemeronKeyBarrier(table, (object).RawField(offset), value); \
} while (false)
#endif
......@@ -357,7 +357,7 @@
DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
if (mode != SKIP_WRITE_BARRIER) { \
if (mode == UPDATE_WRITE_BARRIER) { \
MarkingBarrier(object, (object).RawField(offset), value); \
WriteBarrier::Marking(object, (object).RawField(offset), value); \
} \
GenerationalBarrier(object, (object).RawField(offset), value); \
} \
......@@ -376,7 +376,8 @@
DCHECK_NE(mode, UPDATE_EPHEMERON_KEY_WRITE_BARRIER); \
if (mode != SKIP_WRITE_BARRIER) { \
if (mode == UPDATE_WRITE_BARRIER) { \
MarkingBarrier(object, (object).RawMaybeWeakField(offset), value); \
WriteBarrier::Marking(object, (object).RawMaybeWeakField(offset), \
value); \
} \
GenerationalBarrier(object, (object).RawMaybeWeakField(offset), value); \
} \
......@@ -393,7 +394,7 @@
EphemeronHashTable table = EphemeronHashTable::cast(object); \
if (mode != SKIP_WRITE_BARRIER) { \
if (mode == UPDATE_WRITE_BARRIER) { \
MarkingBarrier(object, (object).RawField(offset), value); \
WriteBarrier::Marking(object, (object).RawField(offset), value); \
} \
GenerationalEphemeronKeyBarrier(table, (object).RawField(offset), \
value); \
......
......@@ -709,7 +709,7 @@ void HeapObject::set_map(Map value) {
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
WriteBarrier::Marking(*this, ObjectSlot(kNullAddress), value);
}
#endif
}
......@@ -729,7 +729,7 @@ void HeapObject::synchronized_set_map(Map value) {
if (!value.is_null()) {
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
WriteBarrier::Marking(*this, ObjectSlot(kNullAddress), value);
}
#endif
}
......@@ -751,7 +751,7 @@ void HeapObject::set_map_after_allocation(Map value, WriteBarrierMode mode) {
DCHECK(!value.is_null());
// TODO(1600) We are passing kNullAddress as a slot because maps can never
// be on an evacuation candidate.
MarkingBarrier(*this, ObjectSlot(kNullAddress), value);
WriteBarrier::Marking(*this, ObjectSlot(kNullAddress), value);
}
#endif
}
......
......@@ -148,6 +148,7 @@ v8_source_set("cctest_sources") {
"heap/test-spaces.cc",
"heap/test-unmapper.cc",
"heap/test-weak-references.cc",
"heap/test-write-barrier.cc",
"interpreter/bytecode-expectations-printer.cc",
"interpreter/bytecode-expectations-printer.h",
"interpreter/interpreter-tester.cc",
......
......@@ -59,6 +59,8 @@
V(Regress791582) \
V(Regress845060) \
V(RegressMissingWriteBarrierInAllocate) \
V(WriteBarrier_Marking) \
V(WriteBarrier_MarkingExtension) \
V(WriteBarriersInCopyJSObject)
#define HEAP_TEST(Name) \
......
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/incremental-marking.h"
#include "src/heap/mark-compact.h"
#include "src/heap/spaces.h"
#include "src/objects/objects-inl.h"
#include "test/cctest/cctest.h"
#include "test/cctest/heap/heap-tester.h"
#include "test/cctest/heap/heap-utils.h"
namespace v8 {
namespace internal {
namespace heap {
// Exercises WriteBarrier::Marking for ordinary object-slot writes: checks
// the barrier's behavior for both a white host and a black host during
// incremental marking. The exact sequence of marking-state transitions is
// significant; do not reorder the statements below.
HEAP_TEST(WriteBarrier_Marking) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  MarkCompactCollector* collector = isolate->heap()->mark_compact_collector();
  HandleScope outer(isolate);
  Handle<FixedArray> objects = factory->NewFixedArray(3);
  {
    // Make sure that these objects are not immediately reachable from
    // the roots to prevent them being marked grey at the start of marking.
    HandleScope inner(isolate);
    Handle<FixedArray> host = factory->NewFixedArray(1);
    Handle<HeapNumber> value1 = factory->NewHeapNumber(1.1);
    Handle<HeapNumber> value2 = factory->NewHeapNumber(1.2);
    objects->set(0, *host);
    objects->set(1, *value1);
    objects->set(2, *value2);
  }
  // Start incremental marking without finishing it.
  heap::SimulateIncrementalMarking(CcTest::heap(), false);
  FixedArray host = FixedArray::cast(objects->get(0));
  HeapObject value1 = HeapObject::cast(objects->get(1));
  HeapObject value2 = HeapObject::cast(objects->get(2));
  CHECK(collector->marking_state()->IsWhite(host));
  CHECK(collector->marking_state()->IsWhite(value1));
  // White host: with concurrent marking the barrier marks the value grey;
  // without it the value is left for the marker to visit via the host.
  WriteBarrier::Marking(host, host.RawFieldOfElementAt(0), value1);
  CHECK_EQ(V8_CONCURRENT_MARKING_BOOL,
           collector->marking_state()->IsGrey(value1));
  collector->marking_state()->WhiteToGrey(host);
  collector->marking_state()->GreyToBlack(host);
  CHECK(collector->marking_state()->IsWhite(value2));
  // Black host: the barrier must mark the value in all configurations,
  // since the marker will not revisit the host.
  WriteBarrier::Marking(host, host.RawFieldOfElementAt(0), value2);
  CHECK(collector->marking_state()->IsGrey(value2));
  // Finish marking; everything reachable must end up black.
  heap::SimulateIncrementalMarking(CcTest::heap(), true);
  CHECK(collector->marking_state()->IsBlack(host));
  CHECK(collector->marking_state()->IsBlack(value1));
  CHECK(collector->marking_state()->IsBlack(value2));
}
// Exercises WriteBarrier::Marking for ArrayBufferExtension writes: with
// concurrent marking the barrier marks the extension eagerly; otherwise the
// extension is marked only when the marker visits the (here still white)
// host, so it stays unmarked until marking finishes.
HEAP_TEST(WriteBarrier_MarkingExtension) {
  ManualGCScope manual_gc_scope;
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  Factory* factory = isolate->factory();
  MarkCompactCollector* collector = isolate->heap()->mark_compact_collector();
  HandleScope outer(isolate);
  Handle<FixedArray> objects = factory->NewFixedArray(1);
  ArrayBufferExtension* extension;
  {
    // Keep the buffer alive through |objects| only, so it is not marked
    // grey from the roots when marking starts.
    HandleScope inner(isolate);
    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(CcTest::isolate(), 100);
    Handle<JSArrayBuffer> host = v8::Utils::OpenHandle(*ab);
    extension = host->extension();
    objects->set(0, *host);
  }
  // Start incremental marking without finishing it.
  heap::SimulateIncrementalMarking(CcTest::heap(), false);
  JSArrayBuffer host = JSArrayBuffer::cast(objects->get(0));
  CHECK(collector->marking_state()->IsWhite(host));
  CHECK(!extension->IsMarked());
  WriteBarrier::Marking(host, extension);
  // Eagerly marked only when concurrent marking is enabled.
  CHECK_EQ(V8_CONCURRENT_MARKING_BOOL, extension->IsMarked());
  heap::SimulateIncrementalMarking(CcTest::heap(), true);
  CHECK(collector->marking_state()->IsBlack(host));
  CHECK(extension->IsMarked());
}
} // namespace heap
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment