Commit 667555c6 authored by Michael Lippautz, committed by Commit Bot

[heap] Remove independent handles

Removes the handling of the independent flag. The flag itself will be removed in a follow-up.

The patch changes the handling of v8::PersistentBase handles that are set to weak (see the sketch after the next list):
- The Scavenger ignores the independent flag.
- The Scavenger keeps alive anything that is marked as Active.
- The Scavenger is free to drop weak handles to non-Active objects if they
  are otherwise dead.

Active:
- Any JSObject is always marked Active.
- Any JSApiObject is marked Active once it has been modified (i.e. has elements, properties, etc.).
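
For illustration, a minimal sketch of the new contract, reusing the FlagAndPersistent helper from the tests below. It assumes the cctest harness (CcTest, CHECK) and assumes that a fresh ObjectTemplate instance qualifies as an unmodified JSApiObject; it sketches the intended semantics and is not a test shipped with this patch:

  static void ResetAndSetFlag(
      const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
    data.GetParameter()->handle.Reset();
    data.GetParameter()->flag = true;
  }

  void WeakNonActiveHandleSketch() {
    v8::Isolate* iso = CcTest::isolate();
    v8::HandleScope scope(iso);
    v8::Local<v8::Context> context = v8::Context::New(iso);
    v8::Context::Scope context_scope(context);

    FlagAndPersistent object;
    {
      v8::HandleScope handle_scope(iso);
      // An unmodified template instance stays non-Active; a plain
      // v8::Object::New() object would be marked Active instead.
      v8::Local<v8::ObjectTemplate> templ = v8::ObjectTemplate::New(iso);
      object.handle.Reset(iso,
                          templ->NewInstance(context).ToLocalChecked());
    }
    object.flag = false;
    object.handle.SetWeak(&object, &ResetAndSetFlag,
                          v8::WeakCallbackType::kParameter);
    // No MarkIndependent() call: the scavenger may drop the weak handle
    // purely because the object is dead and non-Active.
    CcTest::CollectGarbage(i::NEW_SPACE);
    CHECK(object.flag);
  }

A plain JSObject, by contrast, is marked Active, so its weak handle survives scavenges and is only processed by a full GC.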

CQ_INCLUDE_TRYBOTS=master.tryserver.blink:linux_trusty_blink_rel;master.tryserver.chromium.linux:linux_optional_gpu_tests_rel;master.tryserver.chromium.mac:mac_optional_gpu_tests_rel;master.tryserver.chromium.win:win_optional_gpu_tests_rel;master.tryserver.chromium.android:android_optional_gpu_tests_rel

Bug: chromium:780749
Change-Id: If1f547f2419930ad9400bd3b11bdbf609cb57649
Reviewed-on: https://chromium-review.googlesource.com/741801
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#49277}
parent ff4e4ab4
@@ -665,8 +665,7 @@ void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback should_reset_handle) {
 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(RootVisitor* v) {
   for (Node* node : new_space_nodes_) {
     if (node->IsStrongRetainer() ||
-        (node->IsWeakRetainer() && !node->is_independent() &&
-         node->is_active())) {
+        (node->IsWeakRetainer() && node->is_active())) {
       v->VisitRootPointer(Root::kGlobalHandles, node->location());
     }
   }
@@ -680,8 +679,7 @@ void GlobalHandles::IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
       node->set_active(true);
     }
     if (node->IsStrongRetainer() ||
-        (node->IsWeakRetainer() && !node->is_independent() &&
-         node->is_active())) {
+        (node->IsWeakRetainer() && node->is_active())) {
       v->VisitRootPointer(Root::kGlobalHandles, node->location());
     }
   }
@@ -696,13 +694,12 @@ void GlobalHandles::IdentifyWeakUnmodifiedObjects(
   }
 }
 
 void GlobalHandles::MarkNewSpaceWeakUnmodifiedObjectsPending(
-    WeakSlotCallbackWithHeap is_unscavenged) {
+    WeakSlotCallbackWithHeap is_dead) {
   for (Node* node : new_space_nodes_) {
     DCHECK(node->is_in_new_space_list());
-    if ((node->is_independent() || !node->is_active()) && node->IsWeak() &&
-        is_unscavenged(isolate_->heap(), node->location())) {
+    if (node->IsWeak() && is_dead(isolate_->heap(), node->location())) {
+      DCHECK(!node->is_active());
       if (!node->IsPhantomCallback() && !node->IsPhantomResetHandle()) {
         node->MarkPending();
       }
@@ -714,8 +711,8 @@ void GlobalHandles::IterateNewSpaceWeakUnmodifiedRootsForFinalizers(
     RootVisitor* v) {
   for (Node* node : new_space_nodes_) {
     DCHECK(node->is_in_new_space_list());
-    if ((node->is_independent() || !node->is_active()) &&
-        node->IsWeakRetainer() && (node->state() == Node::PENDING)) {
+    if (!node->is_active() && node->IsWeakRetainer() &&
+        (node->state() == Node::PENDING)) {
       DCHECK(!node->IsPhantomCallback());
       DCHECK(!node->IsPhantomResetHandle());
       // Finalizers need to survive.
@@ -728,8 +725,8 @@ void GlobalHandles::IterateNewSpaceWeakUnmodifiedRootsForPhantomHandles(
     RootVisitor* v, WeakSlotCallbackWithHeap should_reset_handle) {
   for (Node* node : new_space_nodes_) {
     DCHECK(node->is_in_new_space_list());
-    if ((node->is_independent() || !node->is_active()) &&
-        node->IsWeakRetainer() && (node->state() != Node::PENDING)) {
+    if (!node->is_active() && node->IsWeakRetainer() &&
+        (node->state() != Node::PENDING)) {
       DCHECK(node->IsPhantomResetHandle() || node->IsPhantomCallback());
       if (should_reset_handle(isolate_->heap(), node->location())) {
         if (node->IsPhantomResetHandle()) {
@@ -774,15 +771,12 @@ int GlobalHandles::PostScavengeProcessing(
       // the freed_nodes.
       continue;
     }
-    // Skip dependent or unmodified handles. Their weak callbacks might expect
-    // to be
-    // called between two global garbage collection callbacks which
-    // are not called for minor collections.
-    if (!node->is_independent() && (node->is_active())) {
-      node->set_active(false);
-      continue;
-    }
+    // Active nodes are kept alive, so no further processing is required.
+    if (node->is_active()) {
+      node->set_active(false);
+      continue;
+    }
 
     if (node->PostGarbageCollectionProcessing(isolate_)) {
       if (initial_post_gc_processing_count != post_gc_processing_count_) {
@@ -793,6 +787,7 @@ int GlobalHandles::PostScavengeProcessing(
         return freed_nodes;
       }
     }
     if (!node->IsRetainer()) {
       freed_nodes++;
     }
...
@@ -154,10 +154,9 @@ class GlobalHandles {
   void IterateNewSpaceStrongAndDependentRootsAndIdentifyUnmodified(
       RootVisitor* v, size_t start, size_t end);
 
-  // Finds weak independent or unmodified handles satisfying
-  // the callback predicate and marks them as pending. See the note above.
+  // Marks weak unmodified handles satisfying |is_dead| as pending.
   void MarkNewSpaceWeakUnmodifiedObjectsPending(
-      WeakSlotCallbackWithHeap is_unscavenged);
+      WeakSlotCallbackWithHeap is_dead);
 
   // Iterates over weak independent or unmodified handles.
   // See the note above.
...
@@ -112,7 +112,6 @@ void SamplingHeapProfiler::SampleObject(Address soon_object, size_t size) {
   Sample* sample = new Sample(size, node, loc, this);
   samples_.insert(sample);
   sample->global.SetWeak(sample, OnWeakCallback, WeakCallbackType::kParameter);
-  sample->global.MarkIndependent();
 }
 
 void SamplingHeapProfiler::OnWeakCallback(
...
@@ -7659,84 +7659,6 @@ struct FlagAndPersistent {
   v8::Global<v8::Object> handle;
 };
 
-static void SetFlag(const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
-  data.GetParameter()->flag = true;
-  data.GetParameter()->handle.Reset();
-}
-
-static void IndependentWeakHandle(bool global_gc, bool interlinked) {
-  i::FLAG_stress_incremental_marking = false;
-  // Parallel scavenge introduces too much fragmentation.
-  i::FLAG_parallel_scavenge = false;
-  v8::Isolate* iso = CcTest::isolate();
-  v8::HandleScope scope(iso);
-  v8::Local<Context> context = Context::New(iso);
-  Context::Scope context_scope(context);
-
-  FlagAndPersistent object_a, object_b;
-
-  size_t big_heap_size = 0;
-  size_t big_array_size = 0;
-
-  {
-    v8::HandleScope handle_scope(iso);
-    Local<Object> a(v8::Object::New(iso));
-    Local<Object> b(v8::Object::New(iso));
-    object_a.handle.Reset(iso, a);
-    object_b.handle.Reset(iso, b);
-    if (interlinked) {
-      a->Set(context, v8_str("x"), b).FromJust();
-      b->Set(context, v8_str("x"), a).FromJust();
-    }
-    if (global_gc) {
-      CcTest::CollectAllGarbage();
-    } else {
-      CcTest::CollectGarbage(i::NEW_SPACE);
-    }
-    v8::Local<Value> big_array = v8::Array::New(CcTest::isolate(), 5000);
-    // Verify that we created an array where the space was reserved up front.
-    big_array_size =
-        v8::internal::JSArray::cast(*v8::Utils::OpenHandle(*big_array))
-            ->elements()
-            ->Size();
-    CHECK_LE(20000, big_array_size);
-    a->Set(context, v8_str("y"), big_array).FromJust();
-    big_heap_size = CcTest::heap()->SizeOfObjects();
-  }
-
-  object_a.flag = false;
-  object_b.flag = false;
-  object_a.handle.SetWeak(&object_a, &SetFlag,
-                          v8::WeakCallbackType::kParameter);
-  object_b.handle.SetWeak(&object_b, &SetFlag,
-                          v8::WeakCallbackType::kParameter);
-  CHECK(!object_b.handle.IsIndependent());
-  object_a.handle.MarkIndependent();
-  object_b.handle.MarkIndependent();
-  CHECK(object_b.handle.IsIndependent());
-  if (global_gc) {
-    CcTest::CollectAllGarbage();
-  } else {
-    CcTest::CollectGarbage(i::NEW_SPACE);
-  }
-  // A single GC should be enough to reclaim the memory, since we are using
-  // phantom handles.
-  CHECK_GT(big_heap_size - big_array_size, CcTest::heap()->SizeOfObjects());
-  CHECK(object_a.flag);
-  CHECK(object_b.flag);
-}
-
-TEST(IndependentWeakHandle) {
-  IndependentWeakHandle(false, false);
-  IndependentWeakHandle(false, true);
-  IndependentWeakHandle(true, false);
-  IndependentWeakHandle(true, true);
-}
-
 class Trivial {
  public:
  explicit Trivial(int x) : x_(x) {}
@@ -7832,130 +7754,6 @@ THREADED_TEST(InternalFieldCallback) {
   InternalFieldCallback(true);
 }
 
-static void ResetUseValueAndSetFlag(
-    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
-  // Blink will reset the handle, and then use the other handle, so they
-  // can't use the same backing slot.
-  data.GetParameter()->handle.Reset();
-  data.GetParameter()->flag = true;
-}
-
-void v8::internal::heap::HeapTester::ResetWeakHandle(bool global_gc) {
-  using v8::Context;
-  using v8::Local;
-  using v8::Object;
-
-  v8::Isolate* iso = CcTest::isolate();
-  v8::HandleScope scope(iso);
-  v8::Local<Context> context = Context::New(iso);
-  Context::Scope context_scope(context);
-
-  FlagAndPersistent object_a, object_b;
-
-  {
-    v8::HandleScope handle_scope(iso);
-    Local<Object> a(v8::Object::New(iso));
-    Local<Object> b(v8::Object::New(iso));
-    object_a.handle.Reset(iso, a);
-    object_b.handle.Reset(iso, b);
-    if (global_gc) {
-      CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-    } else {
-      CcTest::CollectGarbage(i::NEW_SPACE);
-    }
-  }
-
-  object_a.flag = false;
-  object_b.flag = false;
-  object_a.handle.SetWeak(&object_a, &ResetUseValueAndSetFlag,
-                          v8::WeakCallbackType::kParameter);
-  object_b.handle.SetWeak(&object_b, &ResetUseValueAndSetFlag,
-                          v8::WeakCallbackType::kParameter);
-  if (!global_gc) {
-    object_a.handle.MarkIndependent();
-    object_b.handle.MarkIndependent();
-    CHECK(object_b.handle.IsIndependent());
-  }
-  if (global_gc) {
-    CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-  } else {
-    CcTest::CollectGarbage(i::NEW_SPACE);
-  }
-  CHECK(object_a.flag);
-  CHECK(object_b.flag);
-}
-
-THREADED_HEAP_TEST(ResetWeakHandle) {
-  v8::internal::heap::HeapTester::ResetWeakHandle(false);
-  v8::internal::heap::HeapTester::ResetWeakHandle(true);
-}
-
-static void InvokeScavenge() { CcTest::CollectGarbage(i::NEW_SPACE); }
-
-static void InvokeMarkSweep() { CcTest::CollectAllGarbage(); }
-
-static void ForceScavenge2(
-    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
-  data.GetParameter()->flag = true;
-  InvokeScavenge();
-}
-
-static void ForceScavenge1(
-    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
-  data.GetParameter()->handle.Reset();
-  data.SetSecondPassCallback(ForceScavenge2);
-}
-
-static void ForceMarkSweep2(
-    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
-  data.GetParameter()->flag = true;
-  InvokeMarkSweep();
-}
-
-static void ForceMarkSweep1(
-    const v8::WeakCallbackInfo<FlagAndPersistent>& data) {
-  data.GetParameter()->handle.Reset();
-  data.SetSecondPassCallback(ForceMarkSweep2);
-}
-
-THREADED_TEST(GCFromWeakCallbacks) {
-  v8::Isolate* isolate = CcTest::isolate();
-  v8::Locker locker(CcTest::isolate());
-  v8::HandleScope scope(isolate);
-  v8::Local<Context> context = Context::New(isolate);
-  Context::Scope context_scope(context);
-
-  static const int kNumberOfGCTypes = 2;
-  typedef v8::WeakCallbackInfo<FlagAndPersistent>::Callback Callback;
-  Callback gc_forcing_callback[kNumberOfGCTypes] = {&ForceScavenge1,
-                                                    &ForceMarkSweep1};
-
-  typedef void (*GCInvoker)();
-  GCInvoker invoke_gc[kNumberOfGCTypes] = {&InvokeScavenge, &InvokeMarkSweep};
-
-  for (int outer_gc = 0; outer_gc < kNumberOfGCTypes; outer_gc++) {
-    for (int inner_gc = 0; inner_gc < kNumberOfGCTypes; inner_gc++) {
-      FlagAndPersistent object;
-      {
-        v8::HandleScope handle_scope(isolate);
-        object.handle.Reset(isolate, v8::Object::New(isolate));
-      }
-      object.flag = false;
-      object.handle.SetWeak(&object, gc_forcing_callback[inner_gc],
-                            v8::WeakCallbackType::kParameter);
-      object.handle.MarkIndependent();
-      invoke_gc[outer_gc]();
-      EmptyMessageQueues(isolate);
-      CHECK(object.flag);
-    }
-  }
-}
-
 v8::Local<Function> args_fun;
...