Commit dd9ef711 authored by Ulan Degenbaev, committed by Commit Bot

Revert "[heap] Correctly check for black allocated objects in concurrent marker."

This reverts commit 2690e2fc.

Reason for revert: this is not needed because objects in the worklist
are guaranteed not to be black allocated.
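
For context, the invariant relied on here can be sketched as follows. This is a simplified model with illustrative names, not V8's actual MarkingState or worklist API: objects reach the shared marking worklist only through a white-to-grey transition, and black-allocated objects are created black, so they can never be pushed.

    // Sketch only: a simplified model of why worklist objects cannot be
    // black allocated; names here are illustrative, not V8's API.
    #include <cstdio>
    #include <vector>

    enum Color { kWhite, kGrey, kBlack };

    struct Object {
      Color color;
    };

    // Objects are pushed to the marking worklist only on a white -> grey
    // transition (V8's WhiteToGrey-then-Push pattern, simplified).
    void MarkAndPush(Object* obj, std::vector<Object*>* worklist) {
      if (obj->color == kWhite) {
        obj->color = kGrey;
        worklist->push_back(obj);
      }
    }

    int main() {
      std::vector<Object*> worklist;
      Object regular = {kWhite};
      Object black_allocated = {kBlack};  // black allocation creates it black

      MarkAndPush(&regular, &worklist);
      MarkAndPush(&black_allocated, &worklist);  // never enqueued

      // Only `regular` is on the worklist, so anything popped from it is
      // guaranteed not to be a black-allocated object.
      std::printf("worklist size: %zu\n", worklist.size());  // prints 1
      return 0;
    }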

Original change's description:
> [heap] Correctly check for black allocated objects in concurrent marker.
> 
> The markbit check should be performed before using the map of the
> object.
> 
> Change-Id: Ia19e48fd4660387d239e1e330368808727359c7f
> Reviewed-on: https://chromium-review.googlesource.com/c/1301496
> Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
> Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
> Reviewed-by: Hannes Payer <hpayer@chromium.org>
> Cr-Commit-Position: refs/heads/master@{#57040}

TBR=ulan@chromium.org,hpayer@chromium.org,mlippautz@chromium.org

Change-Id: I4f188197620c511060fda4f60c80a3c389007054
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Reviewed-on: https://chromium-review.googlesource.com/c/1301993
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#57043}
parent 21784e3d
@@ -105,8 +105,6 @@ class ConcurrentMarkingVisitor final
   }

   bool ShouldVisit(HeapObject* object) {
-    // TODO(ulan): It is sufficient to flip the second markbit without
-    // doing checking for the first bit.
     return marking_state_.GreyToBlack(object);
   }

@@ -408,8 +406,6 @@ class ConcurrentMarkingVisitor final
     }
   }

-  ConcurrentMarkingState* marking_state() { return &marking_state_; }
-
  private:
   // Helper class for collecting in-object slot addresses and values.
   class SlotSnapshottingVisitor final : public ObjectVisitor {
@@ -624,21 +620,7 @@ void ConcurrentMarking::Run(int task_id, TaskState* task_state) {
       Address addr = object->address();
       if (new_space_top <= addr && addr < new_space_limit) {
         on_hold_->Push(task_id, object);
-      } else if (!visitor.marking_state()->IsBlackAssumingNotWhite(object)) {
-        // We need to ensure that the object is fully initialized.
-        // The new_space_top check above takes care of the newly allocated
-        // objects in the young generation. Now we need to rule out the black
-        // allocated objects in the old generation.
-        //
-        // Black allocation performs the following operations:
-        // 1) Set marking bitmap range to black using relaxed stores.
-        // 2) Issue base::SeqCst_MemoryFence().
-        // 3) Allocate, initialize, and publish an object.
-        //
-        // Since we loaded the object from the worklist, it must have
-        // been published. Since IsBlackAssumingNotWhite was false, the
-        // object was grey after it was published, so it is not black
-        // allocated. Thus the following operation loads a valid map pointer.
+      } else {
         Map* map = object->synchronized_map();
         current_marked_bytes += visitor.Visit(map, object);
       }
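Aside (not part of the diff): the removed comment describes the black-allocation publication order. A minimal sketch of that ordering using standard C++ atomics in place of V8's base::SeqCst_MemoryFence; MarkBits, published_slot, and the thread structure are illustrative stand-ins, not V8 code.

    // Sketch only: models steps 1-3 of black allocation with std::atomic.
    #include <atomic>
    #include <thread>

    struct MarkBits {
      std::atomic<bool> first{false}, second{false};
    };

    MarkBits bits;
    std::atomic<int*> published_slot{nullptr};
    int payload = 0;

    void BlackAllocate() {
      // 1) Set the marking bitmap range to black using relaxed stores.
      bits.first.store(true, std::memory_order_relaxed);
      bits.second.store(true, std::memory_order_relaxed);
      // 2) Issue a fence (base::SeqCst_MemoryFence() in V8).
      std::atomic_thread_fence(std::memory_order_seq_cst);
      // 3) Allocate, initialize, and publish the object.
      payload = 42;
      published_slot.store(&payload, std::memory_order_release);
    }

    void ConcurrentMarker() {
      // A marker that observes the published pointer also observes the
      // black mark bits, because the relaxed bit stores are sequenced
      // before the release store that published the object.
      int* obj = published_slot.load(std::memory_order_acquire);
      if (obj == nullptr) return;
      bool is_black = bits.second.load(std::memory_order_relaxed);
      if (!is_black) {
        // Only then is it safe to read the object's contents.
      }
    }

    int main() {
      std::thread t1(BlackAllocate), t2(ConcurrentMarker);
      t1.join();
      t2.join();
      return 0;
    }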
@@ -64,10 +64,6 @@ class MarkingStateBase {
     return Marking::IsBlackOrGrey<access_mode>(MarkBitFrom(obj));
   }

-  V8_INLINE bool IsBlackAssumingNotWhite(HeapObject* obj) {
-    return Marking::IsBlackAssumingNotWhite<access_mode>(MarkBitFrom(obj));
-  }
-
   V8_INLINE bool WhiteToGrey(HeapObject* obj);
   V8_INLINE bool WhiteToBlack(HeapObject* obj);
   V8_INLINE bool GreyToBlack(HeapObject* obj);
@@ -248,13 +248,6 @@ class Marking : public AllStatic {
     return mark_bit.Get<mode>();
  }

-  // IsBlackAssumingNotWhite assumes that the color is known to be
-  // black or grey. It only checks the second bit.
-  template <AccessMode mode = AccessMode::NON_ATOMIC>
-  V8_INLINE static bool IsBlackAssumingNotWhite(MarkBit mark_bit) {
-    return mark_bit.Next().Get<mode>();
-  }
-
  template <AccessMode mode = AccessMode::NON_ATOMIC>
  V8_INLINE static void MarkWhite(MarkBit markbit) {
    STATIC_ASSERT(mode == AccessMode::NON_ATOMIC);
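Aside (not part of the diff): the removed helper relies on the two-bit mark encoding that the surrounding code implies (white leaves both bits clear, grey sets only the first bit, black sets both), so for an object already known to be grey or black the second bit alone distinguishes black. A minimal sketch under that assumption, with an illustrative MarkCell type rather than V8's MarkBit class:

    // Sketch only: an illustrative two-bit mark cell, not V8's MarkBit.
    #include <cassert>

    struct MarkCell {
      bool first;   // set for grey and black
      bool second;  // set only for black

      bool IsWhite() const { return !first && !second; }
      bool IsGrey() const { return first && !second; }
      bool IsBlack() const { return first && second; }

      // Mirrors the removed helper: valid only when the object is already
      // known to be grey or black, so the second bit alone decides.
      bool IsBlackAssumingNotWhite() const { return second; }

      // Mirrors the TODO removed in ShouldVisit: grey -> black only needs
      // to set the second bit; returns false if the cell was already black.
      bool GreyToBlack() {
        if (second) return false;
        second = true;
        return true;
      }
    };

    int main() {
      MarkCell grey = {true, false};
      MarkCell black = {true, true};
      assert(!grey.IsBlackAssumingNotWhite());
      assert(black.IsBlackAssumingNotWhite());
      assert(grey.GreyToBlack() && grey.IsBlack());
      return 0;
    }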