Commit d6bd3eba authored by hpayer's avatar hpayer Committed by Commit bot

Revert of [heap] Take page lock when scavenging old to new references in...

Revert of [heap] Take page lock when scavenging old to new references in Scavenger. (patchset #2 id:20001 of https://codereview.chromium.org/2781363002/ )

Reason for revert:
This results in a deadlock when the scavenger decides to perform sweeping and re-grabs the same mutex.

Original issue's description:
> [heap] Take page lock when scavenging old to new references in Scavenger.
>
> BUG=v8:5807
>
> Review-Url: https://codereview.chromium.org/2781363002
> Cr-Commit-Position: refs/heads/master@{#44268}
> Committed: https://chromium.googlesource.com/v8/v8/+/4024e6a1bb0b803a5cd16fcd11f39cc305b943c6

TBR=ulan@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=v8:5807

Review-Url: https://codereview.chromium.org/2786313003
Cr-Commit-Position: refs/heads/master@{#44302}
parent a450c185
...@@ -1723,14 +1723,12 @@ void Heap::Scavenge() { ...@@ -1723,14 +1723,12 @@ void Heap::Scavenge() {
{ {
// Copy objects reachable from the old generation. // Copy objects reachable from the old generation.
TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS); TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_OLD_TO_NEW_POINTERS);
RememberedSet<OLD_TO_NEW>::Iterate( RememberedSet<OLD_TO_NEW>::Iterate(this, [this](Address addr) {
this, SYNCHRONIZED, [this](Address addr) { return Scavenger::CheckAndScavengeObject(this, addr);
return Scavenger::CheckAndScavengeObject(this, addr); });
});
RememberedSet<OLD_TO_NEW>::IterateTyped( RememberedSet<OLD_TO_NEW>::IterateTyped(
this, SYNCHRONIZED, this, [this](SlotType type, Address host_addr, Address addr) {
[this](SlotType type, Address host_addr, Address addr) {
return UpdateTypedSlotHelper::UpdateTypedSlot( return UpdateTypedSlotHelper::UpdateTypedSlot(
isolate(), type, addr, [this](Object** addr) { isolate(), type, addr, [this](Object** addr) {
// We expect that objects referenced by code are long living. // We expect that objects referenced by code are long living.
......
...@@ -2369,12 +2369,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() { ...@@ -2369,12 +2369,11 @@ void MinorMarkCompactCollector::MarkLiveObjects() {
{ {
TRACE_GC(heap()->tracer(), TRACE_GC(heap()->tracer(),
GCTracer::Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS); GCTracer::Scope::MINOR_MC_MARK_OLD_TO_NEW_POINTERS);
RememberedSet<OLD_TO_NEW>::Iterate( RememberedSet<OLD_TO_NEW>::Iterate(heap(), [this](Address addr) {
heap(), NON_SYNCHRONIZED, return CheckAndMarkObject(heap(), addr);
[this](Address addr) { return CheckAndMarkObject(heap(), addr); }); });
RememberedSet<OLD_TO_NEW>::IterateTyped( RememberedSet<OLD_TO_NEW>::IterateTyped(
heap(), NON_SYNCHRONIZED, heap(), [this](SlotType type, Address host_addr, Address addr) {
[this](SlotType type, Address host_addr, Address addr) {
return UpdateTypedSlotHelper::UpdateTypedSlot( return UpdateTypedSlotHelper::UpdateTypedSlot(
isolate(), type, addr, [this](Object** addr) { isolate(), type, addr, [this](Object** addr) {
return CheckAndMarkObject(heap(), return CheckAndMarkObject(heap(),
......
...@@ -13,8 +13,6 @@ ...@@ -13,8 +13,6 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
enum RememberedSetIterationMode { SYNCHRONIZED, NON_SYNCHRONIZED };
// TODO(ulan): Investigate performance of de-templatizing this class. // TODO(ulan): Investigate performance of de-templatizing this class.
template <RememberedSetType type> template <RememberedSetType type>
class RememberedSet : public AllStatic { class RememberedSet : public AllStatic {
...@@ -100,13 +98,9 @@ class RememberedSet : public AllStatic { ...@@ -100,13 +98,9 @@ class RememberedSet : public AllStatic {
// Iterates and filters the remembered set with the given callback. // Iterates and filters the remembered set with the given callback.
// The callback should take (Address slot) and return SlotCallbackResult. // The callback should take (Address slot) and return SlotCallbackResult.
template <typename Callback> template <typename Callback>
static void Iterate(Heap* heap, RememberedSetIterationMode mode, static void Iterate(Heap* heap, Callback callback) {
Callback callback) { IterateMemoryChunks(
IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) { heap, [callback](MemoryChunk* chunk) { Iterate(chunk, callback); });
if (mode == SYNCHRONIZED) chunk->mutex()->Lock();
Iterate(chunk, callback);
if (mode == SYNCHRONIZED) chunk->mutex()->Unlock();
});
} }
// Iterates over all memory chunks that contains non-empty slot sets. // Iterates over all memory chunks that contains non-empty slot sets.
...@@ -183,12 +177,9 @@ class RememberedSet : public AllStatic { ...@@ -183,12 +177,9 @@ class RememberedSet : public AllStatic {
// The callback should take (SlotType slot_type, SlotAddress slot) and return // The callback should take (SlotType slot_type, SlotAddress slot) and return
// SlotCallbackResult. // SlotCallbackResult.
template <typename Callback> template <typename Callback>
static void IterateTyped(Heap* heap, RememberedSetIterationMode mode, static void IterateTyped(Heap* heap, Callback callback) {
Callback callback) { IterateMemoryChunks(heap, [callback](MemoryChunk* chunk) {
IterateMemoryChunks(heap, [mode, callback](MemoryChunk* chunk) {
if (mode == SYNCHRONIZED) chunk->mutex()->Lock();
IterateTyped(chunk, callback); IterateTyped(chunk, callback);
if (mode == SYNCHRONIZED) chunk->mutex()->Unlock();
}); });
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment