Commit 5f6aa2e5 authored by Jake Hughes, committed by Commit Bot

[heap] Add object start bitmap for conservative stack scanning

With conservative stack scanning enabled, a snapshot of the call stack
upon entry to GC will be used to determine part of the root-set. When
the collector walks the stack, it looks at each value and determines
whether it could be a potential on-heap object pointer. However, unlike
with Handles, these on-stack pointers aren't guaranteed to point to the
start of the object: the compiler may decide to hide these pointers, and
create interior pointers in C++ frames which the GC doesn't know about.

The solution to this is to include an object start bitmap in the header
of each page. Each bit in the bitmap represents a word in the page
payload which is set when an object is allocated. This means that when
the collector finds an arbitrary potential pointer into the page, it can
walk backwards through the bitmap until it finds the relevant object's
base pointer. To prevent the bitmap becoming stale after compaction, it
is rebuilt during object sweeping.

This is experimental, and currently only works with inline allocation
disabled, and single generational collection.

Bug: v8:10614
Change-Id: I28ebd9562f58f335f8b3c2d1189cdf39feaa1f52
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2375195
Commit-Queue: Anton Bikineev <bikineev@chromium.org>
Reviewed-by: Michael Achenbach <machenbach@chromium.org>
Reviewed-by: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Anton Bikineev <bikineev@chromium.org>
Cr-Commit-Position: refs/heads/master@{#69615}
parent 4c507931
......@@ -368,6 +368,9 @@ if (v8_enable_single_generation == true) {
"Requires unconditional write barriers or none (which disables incremental marking)")
}
assert(!v8_enable_conservative_stack_scanning || v8_enable_single_generation,
"Conservative stack scanning requires single generation")
v8_random_seed = "314159265"
v8_toolset_for_shell = "host"
......@@ -589,6 +592,9 @@ config("features") {
if (v8_enable_single_generation) {
defines += [ "V8_ENABLE_SINGLE_GENERATION" ]
}
if (v8_enable_conservative_stack_scanning) {
defines += [ "V8_ENABLE_CONSERVATIVE_STACK_SCANNING" ]
}
if (v8_disable_write_barriers) {
defines += [ "V8_DISABLE_WRITE_BARRIERS" ]
}
......@@ -3350,6 +3356,10 @@ v8_source_set("v8_base_without_compiler") {
sources += [ "src/heap/third-party/heap-api-stub.cc" ]
}
if (v8_enable_conservative_stack_scanning) {
sources += [ "src/heap/object-start-bitmap.h" ]
}
if (v8_enable_wasm_gdb_remote_debugging) {
sources += [
"src/debug/wasm/gdb-server/gdb-remote-util.cc",
......
......@@ -67,6 +67,9 @@ declare_args() {
# Add fuzzilli fuzzer support.
v8_fuzzilli = false
# Scan the call stack conservatively during garbage collection.
v8_enable_conservative_stack_scanning = false
v8_enable_google_benchmark = checkout_google_benchmark
}
......
......@@ -368,6 +368,15 @@ DEFINE_BOOL_READONLY(
DEFINE_NEG_IMPLICATION(single_generation, inline_new)
DEFINE_NEG_IMPLICATION(single_generation, turbo_allocation_folding)
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
#define V8_ENABLE_CONSERVATIVE_STACK_SCANNING_BOOL true
#else
#define V8_ENABLE_CONSERVATIVE_STACK_SCANNING_BOOL false
#endif
DEFINE_BOOL_READONLY(conservative_stack_scanning,
V8_ENABLE_CONSERVATIVE_STACK_SCANNING_BOOL,
"use conservative stack scanning")
#ifdef V8_ENABLE_FUTURE
#define FUTURE_BOOL true
#else
......
......@@ -246,6 +246,15 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationType type,
->RegisterNewlyAllocatedCodeObject(object.address());
}
}
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
if (AllocationType::kReadOnly != type) {
DCHECK_TAG_ALIGNED(object.address());
Page::FromHeapObject(object)->object_start_bitmap()->SetBit(
object.address());
}
#endif
OnAllocationEvent(object, size_in_bytes);
}
......
......@@ -9,6 +9,10 @@
#include "src/heap/list.h"
#include "src/heap/slot-set.h"
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
#include "src/heap/object-start-bitmap.h"
#endif
namespace v8 {
namespace internal {
......@@ -61,6 +65,9 @@ class V8_EXPORT_PRIVATE MemoryChunkLayout {
FIELD(Bitmap*, YoungGenerationBitmap),
FIELD(CodeObjectRegistry*, CodeObjectRegistry),
FIELD(PossiblyEmptyBuckets, PossiblyEmptyBuckets),
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
FIELD(ObjectStartBitmap, ObjectStartBitmap),
#endif
kMarkingBitmapOffset,
kMemoryChunkHeaderSize = kMarkingBitmapOffset,
kMemoryChunkHeaderStart = kSlotSetOffset,
......
......@@ -155,6 +155,10 @@ MemoryChunk* MemoryChunk::Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
chunk->possibly_empty_buckets_.Initialize();
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
chunk->object_start_bitmap_ = ObjectStartBitmap(chunk->area_start());
#endif
#ifdef DEBUG
ValidateOffsets(chunk);
#endif
......
......@@ -227,6 +227,10 @@ class MemoryChunk : public BasicMemoryChunk {
// read-only space chunks.
void ReleaseAllocatedMemoryNeededForWritableChunk();
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
ObjectStartBitmap* object_start_bitmap() { return &object_start_bitmap_; }
#endif
protected:
static MemoryChunk* Initialize(BasicMemoryChunk* basic_chunk, Heap* heap,
Executability executable);
......@@ -301,6 +305,10 @@ class MemoryChunk : public BasicMemoryChunk {
PossiblyEmptyBuckets possibly_empty_buckets_;
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
ObjectStartBitmap object_start_bitmap_;
#endif
private:
friend class ConcurrentMarkingState;
friend class MajorMarkingState;
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_OBJECT_START_BITMAP_H_
#define V8_HEAP_OBJECT_START_BITMAP_H_
#include <limits.h>
#include <stdint.h>
#include <array>
#include "include/v8-internal.h"
#include "src/base/bits.h"
#include "src/base/macros.h"
#include "src/common/globals.h"
namespace v8 {
namespace internal {
// Minimum object alignment: the bitmap stores one bit per kTaggedSize-aligned
// slot of the page payload.
static constexpr size_t kAllocationGranularity = kTaggedSize;
static constexpr size_t kAllocationMask = kAllocationGranularity - 1;
// Payload size covered by one bitmap (a regular heap page). Use constexpr for
// consistency with the adjacent constants; it is used in constant expressions
// below (kBitmapSize).
static constexpr int kPageSize = 1 << kPageSizeBits;
// A bitmap for recording object starts. Objects have to be allocated at
// minimum granularity of kAllocationGranularity.
//
// Depends on internals such as:
// - kPageSize
// - kAllocationGranularity
//
// ObjectStartBitmap does not support concurrent access and is used only by the
// main thread.
// Records the start address of every object allocated in a page: one bit per
// kAllocationGranularity-aligned slot of the page payload. Used to map an
// arbitrary interior pointer back to the start of the enclosing object.
//
// ObjectStartBitmap does not support concurrent access and is used only by the
// main thread.
class V8_EXPORT_PRIVATE ObjectStartBitmap {
 public:
  // Granularity of addresses added to the bitmap.
  static constexpr size_t Granularity() { return kAllocationGranularity; }

  // Maximum number of entries in the bitmap.
  static constexpr size_t MaxEntries() {
    return kReservedForBitmap * kBitsPerCell;
  }

  // |offset| is the base address of the page payload this bitmap covers; all
  // addresses passed to the methods below are interpreted relative to it.
  explicit inline ObjectStartBitmap(size_t offset = 0);

  // Finds an object header based on a maybe_inner_ptr. Will search for an
  // object start in decreasing address order.
  //
  // This must only be used when there exists at least one entry in the bitmap.
  inline Address FindBasePtr(Address maybe_inner_ptr) const;

  // Set/clear/query the bit for an object starting at the given address. The
  // address must be >= offset() and kAllocationGranularity-aligned (DCHECKed).
  inline void SetBit(Address);
  inline void ClearBit(Address);
  inline bool CheckBit(Address) const;

  // Iterates all object starts recorded in the bitmap.
  //
  // The callback is of type
  //   void(Address)
  // and is passed the object start address as parameter.
  template <typename Callback>
  inline void Iterate(Callback) const;

  // Clear the object start bitmap.
  inline void Clear();

 private:
  // Raw cell accessors; plain loads/stores, no synchronization (main-thread
  // only, see class comment).
  inline void store(size_t cell_index, uint32_t value);
  inline uint32_t load(size_t cell_index) const;

  // Base address of the covered page payload.
  inline Address offset() const;

  static constexpr size_t kBitsPerCell = sizeof(uint32_t) * CHAR_BIT;
  static constexpr size_t kCellMask = kBitsPerCell - 1;
  // Number of cells needed to cover one page at one bit per granule
  // (rounded up).
  static constexpr size_t kBitmapSize =
      (kPageSize + ((kBitsPerCell * kAllocationGranularity) - 1)) /
      (kBitsPerCell * kAllocationGranularity);
  // Cell count rounded up to a multiple of kAllocationGranularity.
  static constexpr size_t kReservedForBitmap =
      ((kBitmapSize + kAllocationMask) & ~kAllocationMask);

  // Translates an object start address into its cell index and bit position.
  inline void ObjectStartIndexAndBit(Address, size_t*, size_t*) const;
  // Inverse mapping: bit index in the bitmap back to a payload address.
  inline Address StartIndexToAddress(size_t object_start_index) const;

  size_t offset_;
  std::array<uint32_t, kReservedForBitmap> object_start_bit_map_;
};
// Constructs an empty bitmap covering the page payload starting at |offset|.
ObjectStartBitmap::ObjectStartBitmap(size_t offset) : offset_(offset) {
  Clear();
}
// Maps maybe_inner_ptr to the start of the object containing it by scanning
// the bitmap backwards from the bit corresponding to the pointer.
// Precondition (see declaration): at least one bit at or below
// maybe_inner_ptr is set; otherwise the search would underflow past cell 0.
Address ObjectStartBitmap::FindBasePtr(Address maybe_inner_ptr) const {
  DCHECK_LE(offset(), maybe_inner_ptr);
  size_t object_offset = maybe_inner_ptr - offset();
  size_t object_start_number = object_offset / kAllocationGranularity;
  size_t cell_index = object_start_number / kBitsPerCell;
  DCHECK_GT(object_start_bit_map_.size(), cell_index);
  const size_t bit = object_start_number & kCellMask;
  // Keep only bits at or below `bit`: object starts above maybe_inner_ptr are
  // not candidates. The mask is built with an unsigned right shift so that
  // bit == kBitsPerCell - 1 is well-defined; the previous form
  // `(1 << (bit + 1)) - 1` shifts by 32 in that case, which is UB.
  uint32_t cell =
      load(cell_index) & (~uint32_t{0} >> (kBitsPerCell - 1 - bit));
  // Walk towards lower addresses until a cell with a set bit is found.
  while (!cell && cell_index) {
    cell = load(--cell_index);
  }
  // The highest set bit in `cell` is the object start closest below (or at)
  // maybe_inner_ptr.
  const int leading_zeroes = v8::base::bits::CountLeadingZeros(cell);
  object_start_number =
      (cell_index * kBitsPerCell) + (kBitsPerCell - 1) - leading_zeroes;
  return StartIndexToAddress(object_start_number);
}
// Records an object start at base_ptr (must be granularity-aligned).
void ObjectStartBitmap::SetBit(Address base_ptr) {
  size_t cell_index, object_bit;
  ObjectStartIndexAndBit(base_ptr, &cell_index, &object_bit);
  // Shift an unsigned one: `1 << 31` shifts into the sign bit of int, which
  // is implementation-defined/UB depending on dialect.
  store(cell_index, load(cell_index) | (uint32_t{1} << object_bit));
}
// Removes the object-start record for base_ptr (must be granularity-aligned).
void ObjectStartBitmap::ClearBit(Address base_ptr) {
  size_t cell_index, object_bit;
  ObjectStartIndexAndBit(base_ptr, &cell_index, &object_bit);
  // Shift an unsigned one to avoid `1 << 31` signed-shift issues.
  store(cell_index, load(cell_index) & ~(uint32_t{1} << object_bit));
}
// Returns true iff an object start is recorded at base_ptr.
bool ObjectStartBitmap::CheckBit(Address base_ptr) const {
  size_t cell_index, object_bit;
  ObjectStartIndexAndBit(base_ptr, &cell_index, &object_bit);
  // Unsigned shift avoids `1 << 31` signed-shift issues for the top bit.
  return (load(cell_index) & (uint32_t{1} << object_bit)) != 0;
}
// Writes one bitmap cell. Plain store; the bitmap is main-thread only.
void ObjectStartBitmap::store(size_t cell_index, uint32_t value) {
  object_start_bit_map_[cell_index] = value;
}
// Reads one bitmap cell. Plain load; the bitmap is main-thread only.
uint32_t ObjectStartBitmap::load(size_t cell_index) const {
  return object_start_bit_map_[cell_index];
}
// Base address of the page payload covered by this bitmap.
Address ObjectStartBitmap::offset() const { return offset_; }
// Translates an object start address into the index of the bitmap cell that
// holds its bit (*cell_index) and the bit's position within that cell (*bit).
void ObjectStartBitmap::ObjectStartIndexAndBit(Address base_ptr,
                                               size_t* cell_index,
                                               size_t* bit) const {
  const size_t offset_in_page = base_ptr - offset();
  // Object starts must be allocation-granularity aligned.
  DCHECK(!(offset_in_page & kAllocationMask));
  const size_t start_number = offset_in_page / kAllocationGranularity;
  *cell_index = start_number / kBitsPerCell;
  DCHECK_GT(kBitmapSize, *cell_index);
  *bit = start_number & kCellMask;
}
// Maps a bit index in the bitmap back to the payload address it represents
// (inverse of ObjectStartIndexAndBit).
Address ObjectStartBitmap::StartIndexToAddress(
    size_t object_start_index) const {
  return offset() + (kAllocationGranularity * object_start_index);
}
template <typename Callback>
inline void ObjectStartBitmap::Iterate(Callback callback) const {
for (size_t cell_index = 0; cell_index < kReservedForBitmap; cell_index++) {
uint32_t value = object_start_bit_map_[cell_index];
while (value) {
const int trailing_zeroes = v8::base::bits::CountTrailingZeros(value);
const size_t object_start_number =
(cell_index * kBitsPerCell) + trailing_zeroes;
const Address object_address = StartIndexToAddress(object_start_number);
callback(object_address);
// Clear current object bit in temporary value to advance iteration.
value &= ~(1 << (object_start_number & kCellMask));
}
}
}
// Resets every cell to zero, i.e. forgets all recorded object starts.
void ObjectStartBitmap::Clear() { object_start_bit_map_.fill(0); }
} // namespace internal
} // namespace v8
#endif // V8_HEAP_OBJECT_START_BITMAP_H_
......@@ -386,6 +386,10 @@ int Sweeper::RawSweep(
// The free ranges map is used for filtering typed slots.
FreeRangesMap free_ranges_map;
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
p->object_start_bitmap()->Clear();
#endif
// Iterate over the page using the live objects and free the memory before
// the given live object.
Address free_start = p->area_start();
......@@ -410,6 +414,10 @@ int Sweeper::RawSweep(
int size = object.SizeFromMap(map);
live_bytes += size;
free_start = free_end + size;
#ifdef V8_ENABLE_CONSERVATIVE_STACK_SCANNING
p->object_start_bitmap()->SetBit(object.address());
#endif
}
// If there is free memory after the last live object also free that.
......
......@@ -61,7 +61,8 @@
V(WriteBarrier_Marking) \
V(WriteBarrier_MarkingExtension) \
V(WriteBarriersInCopyJSObject) \
V(DoNotEvacuatePinnedPages)
V(DoNotEvacuatePinnedPages) \
V(ObjectStartBitmap)
#define HEAP_TEST(Name) \
CcTest register_test_##Name(v8::internal::heap::HeapTester::Test##Name, \
......
......@@ -249,6 +249,44 @@ HEAP_TEST(DoNotEvacuatePinnedPages) {
}
}
// Integration test: allocation records object starts in the page's bitmap,
// FindBasePtr maps interior pointers back to object starts, and the bitmap
// is rebuilt correctly by sweeping during a full GC.
HEAP_TEST(ObjectStartBitmap) {
  // The readonly flags mirror the compile-time setting; skip when disabled.
  if (!FLAG_single_generation || !FLAG_conservative_stack_scanning) return;
  // NOTE(review): the rest of the patch uses #ifdef for this macro; #if works
  // only because the build defines it to 1 — consider #ifdef for consistency.
#if V8_ENABLE_CONSERVATIVE_STACK_SCANNING
  CcTest::InitializeVM();
  Isolate* isolate = CcTest::i_isolate();
  v8::HandleScope sc(CcTest::isolate());
  Heap* heap = isolate->heap();
  heap::SealCurrentObjects(heap);
  auto* factory = isolate->factory();
  // Two freshly allocated strings must have their start bits set.
  HeapObject obj = *factory->NewStringFromStaticChars("hello");
  HeapObject obj2 = *factory->NewStringFromStaticChars("world");
  Page* page = Page::FromAddress(obj.ptr());
  CHECK(page->object_start_bitmap()->CheckBit(obj.address()));
  CHECK(page->object_start_bitmap()->CheckBit(obj2.address()));
  // Interior pointers (2 bytes past the start) resolve to the base address.
  Address obj_inner_ptr = obj.ptr() + 2;
  CHECK(page->object_start_bitmap()->FindBasePtr(obj_inner_ptr) ==
        obj.address());
  Address obj2_inner_ptr = obj2.ptr() + 2;
  CHECK(page->object_start_bitmap()->FindBasePtr(obj2_inner_ptr) ==
        obj2.address());
  // A full GC clears and rebuilds the bitmap during sweeping; the surviving
  // strings must still be recorded afterwards.
  CcTest::CollectAllGarbage();
  CHECK((obj).IsString());
  CHECK((obj2).IsString());
  CHECK(page->object_start_bitmap()->CheckBit(obj.address()));
  CHECK(page->object_start_bitmap()->CheckBit(obj2.address()));
#endif
}
// TODO(1600): compaction of map space is temporary removed from GC.
#if 0
static Handle<Map> CreateMap(Isolate* isolate) {
......
......@@ -337,6 +337,10 @@ v8_source_set("unittests_sources") {
sources += [ "wasm/wasm-gdbserver-unittest.cc" ]
}
if (v8_enable_conservative_stack_scanning) {
sources += [ "heap/object-start-bitmap-unittest.cc" ]
}
if (v8_current_cpu == "arm") {
sources += [
"assembler/turbo-assembler-arm-unittest.cc",
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/object-start-bitmap.h"
#include "src/base/macros.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
namespace internal {
class ObjectStartBitmap;
namespace {
// Returns true iff no object start is recorded in |bitmap|.
bool IsEmpty(const ObjectStartBitmap& bitmap) {
  bool found_any = false;
  bitmap.Iterate([&found_any](Address) { found_any = true; });
  return !found_any;
}
// Abstraction for objects that hides ObjectStartBitmap::Granularity() and
// the base address as getting either of it wrong will result in failed DCHECKs.
// Test object identified by its index in bitmap-granularity units; converts
// implicitly to the Address of its first slot.
class TestObject {
 public:
  static Address kBaseOffset;

  explicit TestObject(size_t number) : number_(number) {
    // The object must fit within the bitmap's capacity.
    const size_t max_entries = ObjectStartBitmap::MaxEntries();
    EXPECT_GE(max_entries, number_);
  }

  // Start address of the object, relative to the shared base offset.
  Address base_ptr() const {
    return kBaseOffset + ObjectStartBitmap::Granularity() * number_;
  }

  // Allow implicitly converting Object to Address.
  operator Address() const { return base_ptr(); }

 private:
  const size_t number_;
};

// Arbitrary, granularity-aligned base address shared by all tests.
Address TestObject::kBaseOffset = reinterpret_cast<Address>(0x4000ul);
} // namespace
// Sanity check: the bitmap must be able to hold at least one entry.
TEST(V8ObjectStartBitmapTest, MoreThanZeroEntriesPossible) {
  const size_t max_entries = ObjectStartBitmap::MaxEntries();
  EXPECT_LT(0u, max_entries);
}
// A freshly constructed bitmap has no bits set.
TEST(V8ObjectStartBitmapTest, InitialEmpty) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  EXPECT_TRUE(IsEmpty(bitmap));
}
// Setting any bit makes the bitmap non-empty.
TEST(V8ObjectStartBitmapTest, SetBitImpliesNonEmpty) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  bitmap.SetBit(TestObject(0));
  EXPECT_FALSE(IsEmpty(bitmap));
}
// A set bit is observable through CheckBit.
TEST(V8ObjectStartBitmapTest, SetBitCheckBit) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object(7);
  bitmap.SetBit(object);
  EXPECT_TRUE(bitmap.CheckBit(object));
}
// ClearBit undoes SetBit for the same object.
TEST(V8ObjectStartBitmapTest, SetBitClearbitCheckBit) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object(77);
  bitmap.SetBit(object);
  bitmap.ClearBit(object);
  EXPECT_FALSE(bitmap.CheckBit(object));
}
// Set followed by clear of the only bit leaves the bitmap empty again.
TEST(V8ObjectStartBitmapTest, SetBitClearBitImpliesEmpty) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object(123);
  bitmap.SetBit(object);
  bitmap.ClearBit(object);
  EXPECT_TRUE(IsEmpty(bitmap));
}
// Two adjacent objects at the lowest indices are both reported by Iterate,
// in increasing address order, and an unset neighbor stays unset.
TEST(V8ObjectStartBitmapTest, AdjacentObjectsAtBegin) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object0(0);
  TestObject object1(1);
  bitmap.SetBit(object0);
  bitmap.SetBit(object1);
  EXPECT_FALSE(bitmap.CheckBit(TestObject(3)));
  size_t count = 0;
  bitmap.Iterate([&count, object0, object1](Address current) {
    if (count == 0) {
      EXPECT_EQ(object0.base_ptr(), current);
    } else if (count == 1) {
      EXPECT_EQ(object1.base_ptr(), current);
    }
    count++;
  });
  EXPECT_EQ(2u, count);
}
// Same as AdjacentObjectsAtBegin, but at the highest valid indices — covers
// the top bit of the last cell.
TEST(V8ObjectStartBitmapTest, AdjacentObjectsAtEnd) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  const size_t last_entry_index = ObjectStartBitmap::MaxEntries() - 1;
  TestObject object0(last_entry_index - 1);
  TestObject object1(last_entry_index);
  bitmap.SetBit(object0);
  bitmap.SetBit(object1);
  EXPECT_FALSE(bitmap.CheckBit(TestObject(last_entry_index - 2)));
  size_t count = 0;
  bitmap.Iterate([&count, object0, object1](Address current) {
    if (count == 0) {
      EXPECT_EQ(object0.base_ptr(), current);
    } else if (count == 1) {
      EXPECT_EQ(object1.base_ptr(), current);
    }
    count++;
  });
  EXPECT_EQ(2u, count);
}
// A pointer exactly at an object start resolves to itself.
TEST(V8ObjectStartBitmapTest, FindBasePtrExact) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object(654);
  bitmap.SetBit(object);
  EXPECT_EQ(object.base_ptr(), bitmap.FindBasePtr(object.base_ptr()));
}
// An interior pointer (some bytes past the start) resolves to the object's
// base address.
TEST(V8ObjectStartBitmapTest, FindBasePtrApproximate) {
  static const size_t kInternalDelta = 37;
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object(654);
  bitmap.SetBit(object);
  EXPECT_EQ(object.base_ptr(),
            bitmap.FindBasePtr(object.base_ptr() + kInternalDelta));
}
// Worst case: a hint at the very end of the bitmap must still find an object
// at the very beginning, scanning backwards over every cell.
TEST(V8ObjectStartBitmapTest, FindBasePtrIteratingWholeBitmap) {
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object_to_find(TestObject(0));
  Address hint_index = TestObject(ObjectStartBitmap::MaxEntries() - 1);
  bitmap.SetBit(object_to_find);
  EXPECT_EQ(object_to_find.base_ptr(), bitmap.FindBasePtr(hint_index));
}
// Hint falls just past the object start; an earlier bit (index 0) must not be
// returned instead of the closer one.
// NOTE(review): kCellSize is sizeof(uint32_t) in *bytes* (4), not the 32 bits
// a cell actually holds, so object and hint land in the same cell — confirm
// whether kBitsPerCell was intended for a true cross-cell test.
TEST(V8ObjectStartBitmapTest, FindBasePtrNextCell) {
  // This white box test makes use of the fact that cells are of type uint32_t.
  const size_t kCellSize = sizeof(uint32_t);
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object_to_find(TestObject(kCellSize - 1));
  Address hint = TestObject(kCellSize);
  bitmap.SetBit(TestObject(0));
  bitmap.SetBit(object_to_find);
  EXPECT_EQ(object_to_find.base_ptr(), bitmap.FindBasePtr(hint));
}
// With two bits set in the same cell, an exact hint on the higher one must
// return it, not the lower one.
// NOTE(review): as in FindBasePtrNextCell, kCellSize is bytes (4), not bits
// per cell (32) — confirm the intended index.
TEST(V8ObjectStartBitmapTest, FindBasePtrSameCell) {
  // This white box test makes use of the fact that cells are of type uint32_t.
  const size_t kCellSize = sizeof(uint32_t);
  ObjectStartBitmap bitmap(TestObject::kBaseOffset);
  TestObject object_to_find(TestObject(kCellSize - 1));
  bitmap.SetBit(TestObject(0));
  bitmap.SetBit(object_to_find);
  EXPECT_EQ(object_to_find.base_ptr(),
            bitmap.FindBasePtr(object_to_find.base_ptr()));
}
} // namespace internal
} // namespace v8
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment