Commit 8686ea81 authored by Dan Elphick, committed by Commit Bot

[heap] Split out paged-spaces.h

Splits out PagedSpace and its subclasses into paged-spaces.h. Also
moves CodeObjectRegistry to code-object-registry.h.

Bug: v8:10473, v8:10506
Change-Id: I35fab1e545e958eb32f3e39a5e2ce8fb087c2a53
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2201763
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#67811}
parent b079058b
@@ -2401,6 +2401,8 @@ v8_source_set("v8_base_without_compiler") {
"src/heap/barrier.h",
"src/heap/basic-memory-chunk.cc",
"src/heap/basic-memory-chunk.h",
"src/heap/code-object-registry.cc",
"src/heap/code-object-registry.h",
"src/heap/code-stats.cc",
"src/heap/code-stats.h",
"src/heap/combined-heap.cc",
@@ -2473,6 +2475,9 @@
"src/heap/off-thread-factory.h",
"src/heap/off-thread-heap.cc",
"src/heap/off-thread-heap.h",
"src/heap/paged-spaces-inl.h",
"src/heap/paged-spaces.cc",
"src/heap/paged-spaces.h",
"src/heap/read-only-heap-inl.h",
"src/heap/read-only-heap.cc",
"src/heap/read-only-heap.h",
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/code-object-registry.h"
#include <algorithm>
#include "src/base/logging.h"
namespace v8 {
namespace internal {
void CodeObjectRegistry::RegisterNewlyAllocatedCodeObject(Address code) {
auto result = code_object_registry_newly_allocated_.insert(code);
USE(result);
DCHECK(result.second);
}
void CodeObjectRegistry::RegisterAlreadyExistingCodeObject(Address code) {
code_object_registry_already_existing_.push_back(code);
}
void CodeObjectRegistry::Clear() {
code_object_registry_already_existing_.clear();
code_object_registry_newly_allocated_.clear();
}
void CodeObjectRegistry::Finalize() {
code_object_registry_already_existing_.shrink_to_fit();
}
bool CodeObjectRegistry::Contains(Address object) const {
return (code_object_registry_newly_allocated_.find(object) !=
code_object_registry_newly_allocated_.end()) ||
(std::binary_search(code_object_registry_already_existing_.begin(),
code_object_registry_already_existing_.end(),
object));
}
Address CodeObjectRegistry::GetCodeObjectStartFromInnerAddress(
Address address) const {
// First, find the object which comes right before the given address in the
// vector of already existing code objects.
Address already_existing_set_ = 0;
Address newly_allocated_set_ = 0;
if (!code_object_registry_already_existing_.empty()) {
auto it =
std::upper_bound(code_object_registry_already_existing_.begin(),
code_object_registry_already_existing_.end(), address);
if (it != code_object_registry_already_existing_.begin()) {
already_existing_set_ = *(--it);
}
}
// Next, find the object which comes right before the given address in the
// set of newly allocated code objects.
if (!code_object_registry_newly_allocated_.empty()) {
auto it = code_object_registry_newly_allocated_.upper_bound(address);
if (it != code_object_registry_newly_allocated_.begin()) {
newly_allocated_set_ = *(--it);
}
}
// The code object which contains the given address has to be in one of the
// two data structures.
DCHECK(already_existing_set_ != 0 || newly_allocated_set_ != 0);
// The larger of the two candidate start addresses is the code object.
return already_existing_set_ > newly_allocated_set_ ? already_existing_set_
: newly_allocated_set_;
}
} // namespace internal
} // namespace v8
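
The lookup above takes the predecessor of the query address in each container and keeps the larger one; whichever registered start lies closest below the inner pointer begins the enclosing object. Below is a minimal standalone sketch of that technique (plain C++ with made-up addresses, not V8 internals):

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <set>
#include <vector>

using Address = std::uintptr_t;

// Predecessor lookup in both containers; the larger predecessor is the
// start of the object enclosing addr.
Address StartFromInnerPointer(const std::vector<Address>& existing,  // sorted
                              const std::set<Address>& newly_allocated,
                              Address addr) {
  Address best_existing = 0;
  auto vit = std::upper_bound(existing.begin(), existing.end(), addr);
  if (vit != existing.begin()) best_existing = *(--vit);
  Address best_new = 0;
  auto sit = newly_allocated.upper_bound(addr);
  if (sit != newly_allocated.begin()) best_new = *(--sit);
  // The enclosing object must be registered in one of the two containers.
  assert(best_existing != 0 || best_new != 0);
  return std::max(best_existing, best_new);
}

int main() {
  std::vector<Address> existing = {0x1000, 0x2000};  // kept sorted
  std::set<Address> newly_allocated = {0x1800};
  assert(StartFromInnerPointer(existing, newly_allocated, 0x1810) == 0x1800);
  assert(StartFromInnerPointer(existing, newly_allocated, 0x2040) == 0x2000);
  return 0;
}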
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_CODE_OBJECT_REGISTRY_H_
#define V8_HEAP_CODE_OBJECT_REGISTRY_H_
#include <set>
#include <vector>
#include "src/base/macros.h"
#include "src/common/globals.h"
namespace v8 {
namespace internal {
// The CodeObjectRegistry holds all start addresses of code objects of a given
// MemoryChunk. Each MemoryChunk owns a separate CodeObjectRegistry. The
// CodeObjectRegistry allows fast lookup from an inner pointer of a code object
// to the actual code object.
class V8_EXPORT_PRIVATE CodeObjectRegistry {
public:
void RegisterNewlyAllocatedCodeObject(Address code);
void RegisterAlreadyExistingCodeObject(Address code);
void Clear();
void Finalize();
bool Contains(Address code) const;
Address GetCodeObjectStartFromInnerAddress(Address address) const;
private:
std::vector<Address> code_object_registry_already_existing_;
std::set<Address> code_object_registry_newly_allocated_;
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_CODE_OBJECT_REGISTRY_H_
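
A hedged usage sketch of the interface above (the call sequence, the made-up addresses, and the ascending registration order are inferred from the implementation, since Contains() binary-searches the already-existing vector):

  CodeObjectRegistry registry;
  registry.RegisterAlreadyExistingCodeObject(0x1000);  // must be registered
  registry.RegisterAlreadyExistingCodeObject(0x2000);  // in ascending order
  registry.Finalize();  // shrink_to_fit the vector once registration is done
  registry.RegisterNewlyAllocatedCodeObject(0x1800);
  DCHECK(registry.Contains(0x1800));
  DCHECK_EQ(registry.GetCodeObjectStartFromInnerAddress(0x1840), 0x1800);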
@@ -7,7 +7,7 @@
#include "src/codegen/code-comments.h"
#include "src/codegen/reloc-info.h"
#include "src/heap/large-spaces.h"
#include "src/heap/spaces-inl.h" // For PagedSpaceObjectIterator.
#include "src/heap/paged-spaces-inl.h" // For PagedSpaceObjectIterator.
#include "src/objects/objects-inl.h"
namespace v8 {
......
@@ -23,7 +23,9 @@
// leak heap internals to users of this interface!
#include "src/execution/isolate-data.h"
#include "src/execution/isolate.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/paged-spaces-inl.h"
#include "src/heap/read-only-spaces.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/allocation-site-inl.h"
......
@@ -30,6 +30,7 @@
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/barrier.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/code-stats.h"
#include "src/heap/combined-heap.h"
#include "src/heap/concurrent-marking.h"
@@ -51,6 +52,7 @@
#include "src/heap/object-stats.h"
#include "src/heap/objects-visiting-inl.h"
#include "src/heap/objects-visiting.h"
#include "src/heap/paged-spaces-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/remembered-set-inl.h"
#include "src/heap/safepoint.h"
......
@@ -7,6 +7,7 @@
#include "src/common/globals.h"
#include "src/heap/heap.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"
namespace v8 {
......
@@ -16,6 +16,7 @@
#include "src/heap/array-buffer-collector.h"
#include "src/heap/array-buffer-sweeper.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/invalidated-slots-inl.h"
......
@@ -6,6 +6,7 @@
#include "src/base/platform/platform.h"
#include "src/heap/array-buffer-tracker.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/memory-chunk-inl.h"
#include "src/heap/spaces.h"
#include "src/objects/heap-object.h"
......
@@ -5,8 +5,8 @@
#include "src/heap/off-thread-heap.h"
#include "src/common/globals.h"
#include "src/heap/paged-spaces-inl.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/spaces.h"
#include "src/objects/objects-body-descriptors-inl.h"
#include "src/roots/roots.h"
#include "src/snapshot/references.h"
......
@@ -9,6 +9,7 @@
#include "src/common/globals.h"
#include "src/heap/large-spaces.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"
#include "src/objects/heap-object.h"
......
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_PAGED_SPACES_INL_H_
#define V8_HEAP_PAGED_SPACES_INL_H_
#include "src/heap/incremental-marking.h"
#include "src/heap/paged-spaces.h"
#include "src/objects/code-inl.h"
#include "src/objects/heap-object.h"
#include "src/objects/objects-inl.h"
namespace v8 {
namespace internal {
// -----------------------------------------------------------------------------
// PagedSpaceObjectIterator
HeapObject PagedSpaceObjectIterator::Next() {
do {
HeapObject next_obj = FromCurrentPage();
if (!next_obj.is_null()) return next_obj;
} while (AdvanceToNextPage());
return HeapObject();
}
HeapObject PagedSpaceObjectIterator::FromCurrentPage() {
while (cur_addr_ != cur_end_) {
// Skip the current linear allocation area (top..limit); it is unused
// memory and contains no iterable objects yet.
if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
cur_addr_ = space_->limit();
continue;
}
HeapObject obj = HeapObject::FromAddress(cur_addr_);
const int obj_size = obj.Size();
cur_addr_ += obj_size;
DCHECK_LE(cur_addr_, cur_end_);
if (!obj.IsFreeSpaceOrFiller()) {
if (obj.IsCode()) {
DCHECK_IMPLIES(
space_->identity() != CODE_SPACE,
space_->identity() == RO_SPACE && Code::cast(obj).is_builtin());
DCHECK_CODEOBJECT_SIZE(obj_size, space_);
} else {
DCHECK_OBJECT_SIZE(obj_size);
}
return obj;
}
}
return HeapObject();
}
bool PagedSpace::Contains(Address addr) const {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return true;
}
return Page::FromAddress(addr)->owner() == this;
}
bool PagedSpace::Contains(Object o) const {
if (!o.IsHeapObject()) return false;
return Page::FromAddress(o.ptr())->owner() == this;
}
void PagedSpace::UnlinkFreeListCategories(Page* page) {
DCHECK_EQ(this, page->owner());
page->ForAllFreeListCategories([this](FreeListCategory* category) {
free_list()->RemoveCategory(category);
});
}
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
DCHECK_EQ(this, page->owner());
size_t added = 0;
page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
added += category->available();
category->Relink(free_list());
});
DCHECK_IMPLIES(!page->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE),
page->AvailableInFreeList() ==
page->AvailableInFreeListFromAllocatedBytes());
return added;
}
bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
if (allocation_info_.top() != kNullAddress) {
const Address object_address = object.address();
if ((allocation_info_.top() - object_size) == object_address) {
allocation_info_.set_top(object_address);
return true;
}
}
return false;
}
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes,
AllocationOrigin origin) {
if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
return true;
}
return SlowRefillLinearAllocationArea(size_in_bytes, origin);
}
HeapObject PagedSpace::AllocateLinearly(int size_in_bytes) {
Address current_top = allocation_info_.top();
Address new_top = current_top + size_in_bytes;
DCHECK_LE(new_top, allocation_info_.limit());
allocation_info_.set_top(new_top);
return HeapObject::FromAddress(current_top);
}
HeapObject PagedSpace::TryAllocateLinearlyAligned(
int* size_in_bytes, AllocationAlignment alignment) {
Address current_top = allocation_info_.top();
int filler_size = Heap::GetFillToAlign(current_top, alignment);
Address new_top = current_top + filler_size + *size_in_bytes;
if (new_top > allocation_info_.limit()) return HeapObject();
allocation_info_.set_top(new_top);
if (filler_size > 0) {
*size_in_bytes += filler_size;
return Heap::PrecedeWithFiller(ReadOnlyRoots(heap()),
HeapObject::FromAddress(current_top),
filler_size);
}
return HeapObject::FromAddress(current_top);
}
AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes,
AllocationOrigin origin) {
DCHECK_IMPLIES(identity() == RO_SPACE, !IsDetached());
if (!EnsureLinearAllocationArea(size_in_bytes, origin)) {
return AllocationResult::Retry(identity());
}
HeapObject object = AllocateLinearly(size_in_bytes);
DCHECK(!object.is_null());
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
if (FLAG_trace_allocations_origins) {
UpdateAllocationOrigins(origin);
}
return object;
}
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
AllocationAlignment alignment,
AllocationOrigin origin) {
DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
DCHECK_IMPLIES(identity() == RO_SPACE, !IsDetached());
int allocation_size = size_in_bytes;
HeapObject object = TryAllocateLinearlyAligned(&allocation_size, alignment);
if (object.is_null()) {
// We don't know exactly how much filler we need to align until space is
// allocated, so assume the worst case.
int filler_size = Heap::GetMaximumFillToAlign(alignment);
allocation_size += filler_size;
if (!EnsureLinearAllocationArea(allocation_size, origin)) {
return AllocationResult::Retry(identity());
}
allocation_size = size_in_bytes;
object = TryAllocateLinearlyAligned(&allocation_size, alignment);
DCHECK(!object.is_null());
}
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
if (FLAG_trace_allocations_origins) {
UpdateAllocationOrigins(origin);
}
return object;
}
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
AllocationAlignment alignment,
AllocationOrigin origin) {
if (top_on_previous_step_ && top() < top_on_previous_step_ &&
SupportsInlineAllocation()) {
// Generated code decreased the top() pointer to do folded allocations.
// The top_on_previous_step_ can be one byte beyond the current page.
DCHECK_NE(top(), kNullAddress);
DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
top_on_previous_step_ = top();
}
size_t bytes_since_last =
top_on_previous_step_ ? top() - top_on_previous_step_ : 0;
DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
AllocationResult result =
alignment != kWordAligned
? AllocateRawAligned(size_in_bytes, alignment, origin)
: AllocateRawUnaligned(size_in_bytes, origin);
#else
AllocationResult result = AllocateRawUnaligned(size_in_bytes, origin);
#endif
HeapObject heap_obj;
if (!result.IsRetry() && result.To(&heap_obj) && !is_local_space()) {
AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
heap_obj.address(), size_in_bytes);
StartNextInlineAllocationStep();
DCHECK_IMPLIES(
heap()->incremental_marking()->black_allocation(),
heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
}
return result;
}
} // namespace internal
} // namespace v8
#endif // V8_HEAP_PAGED_SPACES_INL_H_
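
The fast path above is a classic bump-pointer allocator over a linear area bounded by top and limit. Below is a self-contained sketch of the same shape (plain C++, not V8's actual types; the real code additionally writes a filler object into the alignment padding via Heap::PrecedeWithFiller and refills the area through SlowRefillLinearAllocationArea when a request does not fit):

#include <cassert>
#include <cstdint>

using Address = std::uintptr_t;

// Toy linear allocation area: a bump pointer between top and limit.
struct LinearArea {
  Address top;
  Address limit;

  // Unaligned fast path, as in AllocateLinearly: bump top and return the old
  // value. The caller must already have ensured that top + size <= limit.
  Address AllocateLinearly(int size) {
    assert(top + size <= limit);
    Address result = top;
    top += size;
    return result;
  }

  // Aligned path, as in TryAllocateLinearlyAligned: compute the filler needed
  // to align top, fail (return 0) if the padded request does not fit, and
  // otherwise place the object after the filler bytes.
  Address TryAllocateAligned(int size, int alignment) {
    int filler = static_cast<int>((alignment - top % alignment) % alignment);
    Address new_top = top + filler + size;
    if (new_top > limit) return 0;  // caller refills the area and retries
    Address object = top + filler;  // filler bytes precede the object
    top = new_top;
    return object;
  }
};

int main() {
  LinearArea area{0x1004, 0x2000};
  assert(area.TryAllocateAligned(16, 8) == 0x1008);  // 4 filler bytes used
  assert(area.AllocateLinearly(8) == 0x1018);
  return 0;
}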
This diff is collapsed.
This diff is collapsed.
@@ -12,6 +12,7 @@
#include "src/base/macros.h"
#include "src/heap/list.h"
#include "src/heap/memory-chunk.h"
#include "src/heap/paged-spaces.h"
#include "src/heap/spaces.h"
namespace v8 {
......
@@ -63,42 +63,6 @@ HeapObject SemiSpaceObjectIterator::Next() {
return HeapObject();
}
// -----------------------------------------------------------------------------
// PagedSpaceObjectIterator
HeapObject PagedSpaceObjectIterator::Next() {
do {
HeapObject next_obj = FromCurrentPage();
if (!next_obj.is_null()) return next_obj;
} while (AdvanceToNextPage());
return HeapObject();
}
HeapObject PagedSpaceObjectIterator::FromCurrentPage() {
while (cur_addr_ != cur_end_) {
if (cur_addr_ == space_->top() && cur_addr_ != space_->limit()) {
cur_addr_ = space_->limit();
continue;
}
HeapObject obj = HeapObject::FromAddress(cur_addr_);
const int obj_size = obj.Size();
cur_addr_ += obj_size;
DCHECK_LE(cur_addr_, cur_end_);
if (!obj.IsFreeSpaceOrFiller()) {
if (obj.IsCode()) {
DCHECK_IMPLIES(
space_->identity() != CODE_SPACE,
space_->identity() == RO_SPACE && Code::cast(obj).is_builtin());
DCHECK_CODEOBJECT_SIZE(obj_size, space_);
} else {
DCHECK_OBJECT_SIZE(obj_size);
}
return obj;
}
}
return HeapObject();
}
void Space::IncrementExternalBackingStoreBytes(ExternalBackingStoreType type,
size_t amount) {
base::CheckedIncrement(&external_backing_store_bytes_[type], amount);
@@ -165,50 +129,6 @@ bool NewSpace::FromSpaceContains(Object o) const {
return from_space_.Contains(o);
}
bool PagedSpace::Contains(Address addr) const {
if (V8_ENABLE_THIRD_PARTY_HEAP_BOOL) {
return true;
}
return Page::FromAddress(addr)->owner() == this;
}
bool PagedSpace::Contains(Object o) const {
if (!o.IsHeapObject()) return false;
return Page::FromAddress(o.ptr())->owner() == this;
}
void PagedSpace::UnlinkFreeListCategories(Page* page) {
DCHECK_EQ(this, page->owner());
page->ForAllFreeListCategories([this](FreeListCategory* category) {
free_list()->RemoveCategory(category);
});
}
size_t PagedSpace::RelinkFreeListCategories(Page* page) {
DCHECK_EQ(this, page->owner());
size_t added = 0;
page->ForAllFreeListCategories([this, &added](FreeListCategory* category) {
added += category->available();
category->Relink(free_list());
});
DCHECK_IMPLIES(!page->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE),
page->AvailableInFreeList() ==
page->AvailableInFreeListFromAllocatedBytes());
return added;
}
bool PagedSpace::TryFreeLast(HeapObject object, int object_size) {
if (allocation_info_.top() != kNullAddress) {
const Address object_address = object.address();
if ((allocation_info_.top() - object_size) == object_address) {
allocation_info_.set_top(object_address);
return true;
}
}
return false;
}
void Page::MarkNeverAllocateForTesting() {
DCHECK(this->owner_identity() != NEW_SPACE);
DCHECK(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
@@ -345,122 +265,6 @@ AllocationResult LocalAllocationBuffer::AllocateRawAligned(
return AllocationResult(HeapObject::FromAddress(current_top));
}
bool PagedSpace::EnsureLinearAllocationArea(int size_in_bytes,
AllocationOrigin origin) {
if (allocation_info_.top() + size_in_bytes <= allocation_info_.limit()) {
return true;
}
return SlowRefillLinearAllocationArea(size_in_bytes, origin);
}
HeapObject PagedSpace::AllocateLinearly(int size_in_bytes) {
Address current_top = allocation_info_.top();
Address new_top = current_top + size_in_bytes;
DCHECK_LE(new_top, allocation_info_.limit());
allocation_info_.set_top(new_top);
return HeapObject::FromAddress(current_top);
}
HeapObject PagedSpace::TryAllocateLinearlyAligned(
int* size_in_bytes, AllocationAlignment alignment) {
Address current_top = allocation_info_.top();
int filler_size = Heap::GetFillToAlign(current_top, alignment);
Address new_top = current_top + filler_size + *size_in_bytes;
if (new_top > allocation_info_.limit()) return HeapObject();
allocation_info_.set_top(new_top);
if (filler_size > 0) {
*size_in_bytes += filler_size;
return Heap::PrecedeWithFiller(ReadOnlyRoots(heap()),
HeapObject::FromAddress(current_top),
filler_size);
}
return HeapObject::FromAddress(current_top);
}
AllocationResult PagedSpace::AllocateRawUnaligned(int size_in_bytes,
AllocationOrigin origin) {
DCHECK_IMPLIES(identity() == RO_SPACE, !IsDetached());
if (!EnsureLinearAllocationArea(size_in_bytes, origin)) {
return AllocationResult::Retry(identity());
}
HeapObject object = AllocateLinearly(size_in_bytes);
DCHECK(!object.is_null());
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
if (FLAG_trace_allocations_origins) {
UpdateAllocationOrigins(origin);
}
return object;
}
AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
AllocationAlignment alignment,
AllocationOrigin origin) {
DCHECK(identity() == OLD_SPACE || identity() == RO_SPACE);
DCHECK_IMPLIES(identity() == RO_SPACE, !IsDetached());
int allocation_size = size_in_bytes;
HeapObject object = TryAllocateLinearlyAligned(&allocation_size, alignment);
if (object.is_null()) {
// We don't know exactly how much filler we need to align until space is
// allocated, so assume the worst case.
int filler_size = Heap::GetMaximumFillToAlign(alignment);
allocation_size += filler_size;
if (!EnsureLinearAllocationArea(allocation_size, origin)) {
return AllocationResult::Retry(identity());
}
allocation_size = size_in_bytes;
object = TryAllocateLinearlyAligned(&allocation_size, alignment);
DCHECK(!object.is_null());
}
MSAN_ALLOCATED_UNINITIALIZED_MEMORY(object.address(), size_in_bytes);
if (FLAG_trace_allocations_origins) {
UpdateAllocationOrigins(origin);
}
return object;
}
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
AllocationAlignment alignment,
AllocationOrigin origin) {
if (top_on_previous_step_ && top() < top_on_previous_step_ &&
SupportsInlineAllocation()) {
// Generated code decreased the top() pointer to do folded allocations.
// The top_on_previous_step_ can be one byte beyond the current page.
DCHECK_NE(top(), kNullAddress);
DCHECK_EQ(Page::FromAllocationAreaAddress(top()),
Page::FromAllocationAreaAddress(top_on_previous_step_ - 1));
top_on_previous_step_ = top();
}
size_t bytes_since_last =
top_on_previous_step_ ? top() - top_on_previous_step_ : 0;
DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
#ifdef V8_HOST_ARCH_32_BIT
AllocationResult result =
alignment != kWordAligned
? AllocateRawAligned(size_in_bytes, alignment, origin)
: AllocateRawUnaligned(size_in_bytes, origin);
#else
AllocationResult result = AllocateRawUnaligned(size_in_bytes, origin);
#endif
HeapObject heap_obj;
if (!result.IsRetry() && result.To(&heap_obj) && !is_local_space()) {
AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
heap_obj.address(), size_in_bytes);
StartNextInlineAllocationStep();
DCHECK_IMPLIES(
heap()->incremental_marking()->black_allocation(),
heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
}
return result;
}
// -----------------------------------------------------------------------------
// NewSpace
......
This diff is collapsed.
This diff is collapsed.
@@ -6,6 +6,7 @@
#include "src/execution/vm-state-inl.h"
#include "src/heap/array-buffer-tracker-inl.h"
#include "src/heap/code-object-registry.h"
#include "src/heap/gc-tracer.h"
#include "src/heap/invalidated-slots-inl.h"
#include "src/heap/mark-compact-inl.h"
......
@@ -6,10 +6,10 @@
#include "include/libplatform/libplatform.h"
#include "include/v8.h"
#include "src/execution/frames.h"
#include "src/execution/isolate.h"
#include "src/heap/heap-inl.h"
#include "src/heap/paged-spaces-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/heap/spaces.h"
#include "src/objects/objects-inl.h"
......
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/heap/spaces.h"
#include "src/heap/code-object-registry.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace v8 {
......