Commit 0197a15f authored by jochen's avatar jochen Committed by Commit bot

Explicitly track whether incremental marking was activated

In the gc-tracer, we check whether we're marking to figure out which
phase of the mark-compact we're in. If we aborted incremental marking for
whatever reason, that check gives the wrong answer and we might later run
into trouble.

BUG=none
R=hpayer@chromium.org
LOG=n

Review URL: https://codereview.chromium.org/783453003

Cr-Commit-Position: refs/heads/master@{#25663}
parent a25003cf
......@@ -121,7 +121,7 @@ void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
if (collector == SCAVENGER) {
current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
} else if (collector == MARK_COMPACTOR) {
if (heap_->incremental_marking()->IsMarking()) {
if (heap_->incremental_marking()->WasActivated()) {
current_ =
Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
} else {
......
......@@ -1210,15 +1210,22 @@ void Heap::MarkCompact() {
LOG(isolate_, ResourceEvent("markcompact", "end"));
MarkCompactEpilogue();
if (FLAG_allocation_site_pretenuring) {
EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
}
}
// Runs after a full mark-compact GC: leaves the IN_GC state, resets
// per-full-GC counters/flags, and notifies incremental marking that the
// cycle is over (which clears its was_activated_ flag via Epilogue()).
void Heap::MarkCompactEpilogue() {
// Mark the heap as no longer inside a garbage collection.
gc_state_ = NOT_IN_GC;
// Reset the "objects allocated since last full GC" counter.
isolate_->counters()->objs_since_last_full()->Set(0);
flush_monomorphic_ics_ = false;
if (FLAG_allocation_site_pretenuring) {
// NOTE(review): size_of_objects_before_gc is a local of Heap::MarkCompact,
// not visible in this scope — this line looks like a diff-merge artifact of
// the scraped review page (the hunk moves this block into MarkCompact).
// Verify against the real source before relying on this text.
EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
}
// Tell incremental marking the full GC cycle finished so WasActivated()
// returns false again until the next Start().
incremental_marking()->Epilogue();
}
......
......@@ -1936,6 +1936,7 @@ class Heap {
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
void MarkCompactEpilogue();
void ProcessNativeContexts(WeakObjectRetainer* retainer);
void ProcessArrayBuffers(WeakObjectRetainer* retainer);
......
......@@ -27,7 +27,8 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
allocated_(0),
idle_marking_delay_counter_(0),
no_marking_scope_depth_(0),
unscanned_bytes_of_large_object_(0) {}
unscanned_bytes_of_large_object_(0),
was_activated_(false) {}
void IncrementalMarking::RecordWriteSlow(HeapObject* obj, Object** slot,
......@@ -423,6 +424,9 @@ bool IncrementalMarking::ShouldActivate() {
}
// Reports whether incremental marking has been started (Start() sets the
// flag) and not yet finished (Epilogue() clears it). Unlike IsMarking(),
// this stays true even if marking was aborted mid-cycle.
bool IncrementalMarking::WasActivated() {
  return was_activated_;
}
bool IncrementalMarking::WorthActivating() {
#ifndef DEBUG
static const intptr_t kActivationThreshold = 8 * MB;
......@@ -490,6 +494,8 @@ void IncrementalMarking::Start(CompactionFlag flag) {
ResetStepCounters();
was_activated_ = true;
if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
StartMarking(flag);
} else {
......@@ -785,6 +791,9 @@ void IncrementalMarking::MarkingComplete(CompletionAction action) {
}
// Called at the end of a mark-compact cycle; clears the activation flag so
// WasActivated() answers false until incremental marking starts again.
void IncrementalMarking::Epilogue() {
  was_activated_ = false;
}
void IncrementalMarking::OldSpaceStep(intptr_t allocated) {
if (IsStopped() && ShouldActivate()) {
// TODO(hpayer): Let's play safe for now, but compaction should be
......
......@@ -48,6 +48,8 @@ class IncrementalMarking {
bool ShouldActivate();
bool WasActivated();
enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };
void Start(CompactionFlag flag = ALLOW_COMPACTION);
......@@ -66,6 +68,8 @@ class IncrementalMarking {
void MarkingComplete(CompletionAction action);
void Epilogue();
// It's hard to know how much work the incremental marker should do to make
// progress in the face of the mutator creating new work for it. We start
// of at a moderate rate of work and gradually increase the speed of the
......@@ -222,6 +226,8 @@ class IncrementalMarking {
int unscanned_bytes_of_large_object_;
bool was_activated_;
DISALLOW_IMPLICIT_CONSTRUCTORS(IncrementalMarking);
};
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.