Commit cef09fef authored by Jakob Kummerow, committed by Commit Bot

[ubsan] Port FreeSpace to the new design

Bug: v8:3770
Change-Id: I968bcf38bfbf3672181ac4ad6b787c0b77a89cfd
Reviewed-on: https://chromium-review.googlesource.com/c/1380911
Commit-Queue: Jakob Kummerow <jkummerow@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58295}
parent 7f21bbc1
......@@ -154,12 +154,13 @@ ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitStruct(
// Visits a FreeSpace object: optionally visits its map pointer (when the
// concrete visitor requests it), then returns the object's size as the
// visitation result. Returns a default-constructed ResultType when the
// concrete visitor declines to visit the object at all.
template <typename ResultType, typename ConcreteVisitor>
ResultType HeapVisitor<ResultType, ConcreteVisitor>::VisitFreeSpace(
    Map map, FreeSpace object) {
  ConcreteVisitor* visitor = static_cast<ConcreteVisitor*>(this);
  if (!visitor->ShouldVisit(object)) return ResultType();
  if (visitor->ShouldVisitMapPointer()) {
    visitor->VisitMapPointer(object, object->map_slot());
  }
  return static_cast<ResultType>(object->size());
}
template <typename ConcreteVisitor>
......
......@@ -120,7 +120,7 @@ class HeapVisitor : public ObjectVisitor {
V8_INLINE ResultType VisitJSObjectFast(Map map, JSObject object);
V8_INLINE ResultType VisitJSApiObject(Map map, JSObject object);
V8_INLINE ResultType VisitStruct(Map map, HeapObject* object);
V8_INLINE ResultType VisitFreeSpace(Map map, FreeSpace* object);
V8_INLINE ResultType VisitFreeSpace(Map map, FreeSpace object);
V8_INLINE ResultType VisitWeakArray(Map map, HeapObject* object);
template <typename T, typename = typename std::enable_if<
......
......@@ -1877,8 +1877,8 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
}
size_t new_node_size = 0;
FreeSpace* new_node = free_list_.Allocate(size_in_bytes, &new_node_size);
if (new_node == nullptr) return false;
FreeSpace new_node = free_list_.Allocate(size_in_bytes, &new_node_size);
if (new_node.is_null()) return false;
DCHECK_GE(new_node_size, size_in_bytes);
......@@ -1889,7 +1889,7 @@ bool PagedSpace::RefillLinearAllocationAreaFromFreeList(size_t size_in_bytes) {
// Memory in the linear allocation area is counted as allocated. We may free
// a little of this again immediately - see below.
Page* page = Page::FromAddress(new_node->address());
Page* page = Page::FromHeapObject(new_node);
IncreaseAllocatedBytes(new_node_size, page);
Address start = new_node->address();
......@@ -2805,19 +2805,19 @@ size_t NewSpace::CommittedPhysicalMemory() {
// Resets this category to its empty state: clears the free-list head
// (an empty FreeSpace handle, the post-ubsan-port replacement for a null
// pointer), unlinks it from its sibling categories, and zeroes the
// available-bytes counter.
void FreeListCategory::Reset() {
  set_top(FreeSpace());
  set_prev(nullptr);
  set_next(nullptr);
  available_ = 0;
}
FreeSpace* FreeListCategory::PickNodeFromList(size_t minimum_size,
size_t* node_size) {
FreeSpace FreeListCategory::PickNodeFromList(size_t minimum_size,
size_t* node_size) {
DCHECK(page()->CanAllocate());
FreeSpace* node = top();
if (node == nullptr || static_cast<size_t>(node->Size()) < minimum_size) {
FreeSpace node = top();
if (node.is_null() || static_cast<size_t>(node->Size()) < minimum_size) {
*node_size = 0;
return nullptr;
return FreeSpace();
}
set_top(node->next());
*node_size = node->Size();
......@@ -2825,11 +2825,11 @@ FreeSpace* FreeListCategory::PickNodeFromList(size_t minimum_size,
return node;
}
FreeSpace* FreeListCategory::SearchForNodeInList(size_t minimum_size,
size_t* node_size) {
FreeSpace FreeListCategory::SearchForNodeInList(size_t minimum_size,
size_t* node_size) {
DCHECK(page()->CanAllocate());
FreeSpace* prev_non_evac_node = nullptr;
for (FreeSpace* cur_node = top(); cur_node != nullptr;
FreeSpace prev_non_evac_node;
for (FreeSpace cur_node = top(); !cur_node.is_null();
cur_node = cur_node->next()) {
size_t size = cur_node->size();
if (size >= minimum_size) {
......@@ -2838,9 +2838,8 @@ FreeSpace* FreeListCategory::SearchForNodeInList(size_t minimum_size,
if (cur_node == top()) {
set_top(cur_node->next());
}
if (prev_non_evac_node != nullptr) {
MemoryChunk* chunk =
MemoryChunk::FromAddress(prev_non_evac_node->address());
if (!prev_non_evac_node.is_null()) {
MemoryChunk* chunk = MemoryChunk::FromHeapObject(prev_non_evac_node);
if (chunk->owner()->identity() == CODE_SPACE) {
chunk->heap()->UnprotectAndRegisterMemoryChunk(chunk);
}
......@@ -2852,13 +2851,13 @@ FreeSpace* FreeListCategory::SearchForNodeInList(size_t minimum_size,
prev_non_evac_node = cur_node;
}
return nullptr;
return FreeSpace();
}
void FreeListCategory::Free(Address start, size_t size_in_bytes,
FreeMode mode) {
DCHECK(page()->CanAllocate());
FreeSpace* free_space = FreeSpace::cast(HeapObject::FromAddress(start));
FreeSpace free_space = FreeSpace::cast(HeapObject::FromAddress(start));
free_space->set_next(top());
set_top(free_space);
available_ += size_in_bytes;
......@@ -2869,10 +2868,13 @@ void FreeListCategory::Free(Address start, size_t size_in_bytes,
void FreeListCategory::RepairFreeList(Heap* heap) {
FreeSpace* n = top();
while (n != nullptr) {
FreeSpace n = top();
while (!n.is_null()) {
ObjectSlot map_location(n->address());
if (*map_location == nullptr) {
// We can't use .is_null() here because ObjectSlot.load() returns an
// ObjectPtr (for which "is null" is not defined, as it would be
// indistinguishable from "is Smi(0)"). Only HeapObjectPtr has "is_null()".
if (map_location.load() == Map()) {
map_location.store(ReadOnlyRoots(heap).free_space_map());
} else {
DCHECK(*map_location == ReadOnlyRoots(heap).free_space_map());
......@@ -2923,14 +2925,14 @@ size_t FreeList::Free(Address start, size_t size_in_bytes, FreeMode mode) {
return 0;
}
FreeSpace* FreeList::FindNodeIn(FreeListCategoryType type, size_t minimum_size,
size_t* node_size) {
FreeSpace FreeList::FindNodeIn(FreeListCategoryType type, size_t minimum_size,
size_t* node_size) {
FreeListCategoryIterator it(this, type);
FreeSpace* node = nullptr;
FreeSpace node;
while (it.HasNext()) {
FreeListCategory* current = it.Next();
node = current->PickNodeFromList(minimum_size, node_size);
if (node != nullptr) {
if (!node.is_null()) {
DCHECK(IsVeryLong() || Available() == SumFreeLists());
return node;
}
......@@ -2939,26 +2941,25 @@ FreeSpace* FreeList::FindNodeIn(FreeListCategoryType type, size_t minimum_size,
return node;
}
// Tries to retrieve a node of at least |minimum_size| from the first category
// of the given |type|, storing the actual node size in |node_size|. Returns an
// empty FreeSpace handle when the category list is empty or its top entry is
// smaller than |minimum_size|.
FreeSpace FreeList::TryFindNodeIn(FreeListCategoryType type,
                                  size_t minimum_size, size_t* node_size) {
  if (categories_[type] == nullptr) return FreeSpace();
  FreeSpace node = categories_[type]->PickNodeFromList(minimum_size, node_size);
  if (!node.is_null()) {
    // Free-list bookkeeping must stay consistent after a successful pick.
    DCHECK(IsVeryLong() || Available() == SumFreeLists());
  }
  return node;
}
FreeSpace* FreeList::SearchForNodeInList(FreeListCategoryType type,
size_t* node_size,
size_t minimum_size) {
FreeSpace FreeList::SearchForNodeInList(FreeListCategoryType type,
size_t* node_size,
size_t minimum_size) {
FreeListCategoryIterator it(this, type);
FreeSpace* node = nullptr;
FreeSpace node;
while (it.HasNext()) {
FreeListCategory* current = it.Next();
node = current->SearchForNodeInList(minimum_size, node_size);
if (node != nullptr) {
if (!node.is_null()) {
DCHECK(IsVeryLong() || Available() == SumFreeLists());
return node;
}
......@@ -2969,32 +2970,32 @@ FreeSpace* FreeList::SearchForNodeInList(FreeListCategoryType type,
return node;
}
FreeSpace* FreeList::Allocate(size_t size_in_bytes, size_t* node_size) {
FreeSpace FreeList::Allocate(size_t size_in_bytes, size_t* node_size) {
DCHECK_GE(kMaxBlockSize, size_in_bytes);
FreeSpace* node = nullptr;
FreeSpace node;
// First try the allocation fast path: try to allocate the minimum element
// size of a free list category. This operation is constant time.
FreeListCategoryType type =
SelectFastAllocationFreeListCategoryType(size_in_bytes);
for (int i = type; i < kHuge && node == nullptr; i++) {
for (int i = type; i < kHuge && node.is_null(); i++) {
node = FindNodeIn(static_cast<FreeListCategoryType>(i), size_in_bytes,
node_size);
}
if (node == nullptr) {
if (node.is_null()) {
// Next search the huge list for free list nodes. This takes linear time in
// the number of huge elements.
node = SearchForNodeInList(kHuge, node_size, size_in_bytes);
}
if (node == nullptr && type != kHuge) {
if (node.is_null() && type != kHuge) {
// We didn't find anything in the huge list. Now search the best fitting
// free list for a node that has at least the requested size.
type = SelectFreeListCategoryType(size_in_bytes);
node = TryFindNodeIn(type, size_in_bytes, node_size);
}
if (node != nullptr) {
if (!node.is_null()) {
Page::FromAddress(node->address())->IncreaseAllocatedBytes(*node_size);
}
......@@ -3080,8 +3081,8 @@ void FreeList::PrintCategories(FreeListCategoryType type) {
#ifdef DEBUG
size_t FreeListCategory::SumFreeList() {
size_t sum = 0;
FreeSpace* cur = top();
while (cur != nullptr) {
FreeSpace cur = top();
while (!cur.is_null()) {
DCHECK_EQ(cur->map(),
page()->heap()->isolate()->root(RootIndex::kFreeSpaceMap));
sum += cur->relaxed_read_size();
......@@ -3092,8 +3093,8 @@ size_t FreeListCategory::SumFreeList() {
int FreeListCategory::FreeListLength() {
int length = 0;
FreeSpace* cur = top();
while (cur != nullptr) {
FreeSpace cur = top();
while (!cur.is_null()) {
length++;
cur = cur->next();
if (length == kVeryLongFreeList) return length;
......
......@@ -157,14 +157,12 @@ class FreeListCategory {
page_(page),
type_(kInvalidCategory),
available_(0),
top_(nullptr),
prev_(nullptr),
next_(nullptr) {}
void Initialize(FreeListCategoryType type) {
type_ = type;
available_ = 0;
top_ = nullptr;
prev_ = nullptr;
next_ = nullptr;
}
......@@ -184,16 +182,16 @@ class FreeListCategory {
// Performs a single try to pick a node of at least |minimum_size| from the
// category. Stores the actual size in |node_size|. Returns nullptr if no
// node is found.
FreeSpace* PickNodeFromList(size_t minimum_size, size_t* node_size);
FreeSpace PickNodeFromList(size_t minimum_size, size_t* node_size);
// Picks a node of at least |minimum_size| from the category. Stores the
// actual size in |node_size|. Returns nullptr if no node is found.
FreeSpace* SearchForNodeInList(size_t minimum_size, size_t* node_size);
FreeSpace SearchForNodeInList(size_t minimum_size, size_t* node_size);
inline FreeList* owner();
inline Page* page() const { return page_; }
inline bool is_linked();
bool is_empty() { return top() == nullptr; }
bool is_empty() { return top().is_null(); }
size_t available() const { return available_; }
void set_free_list(FreeList* free_list) { free_list_ = free_list; }
......@@ -208,8 +206,8 @@ class FreeListCategory {
// {kVeryLongFreeList} by manually walking the list.
static const int kVeryLongFreeList = 500;
FreeSpace* top() { return top_; }
void set_top(FreeSpace* top) { top_ = top; }
FreeSpace top() { return top_; }
void set_top(FreeSpace top) { top_ = top; }
FreeListCategory* prev() { return prev_; }
void set_prev(FreeListCategory* prev) { prev_ = prev; }
FreeListCategory* next() { return next_; }
......@@ -228,8 +226,8 @@ class FreeListCategory {
// category.
size_t available_;
// |top_|: Points to the top FreeSpace* in the free list category.
FreeSpace* top_;
// |top_|: Points to the top FreeSpace in the free list category.
FreeSpace top_;
FreeListCategory* prev_;
FreeListCategory* next_;
......@@ -1808,8 +1806,8 @@ class V8_EXPORT_PRIVATE FreeList {
// bytes. Returns the actual node size in node_size which can be bigger than
// size_in_bytes. This method returns null if the allocation request cannot be
// handled by the free list.
V8_WARN_UNUSED_RESULT FreeSpace* Allocate(size_t size_in_bytes,
size_t* node_size);
V8_WARN_UNUSED_RESULT FreeSpace Allocate(size_t size_in_bytes,
size_t* node_size);
// Clear the free list.
void Reset();
......@@ -1912,18 +1910,18 @@ class V8_EXPORT_PRIVATE FreeList {
// Walks all available categories for a given |type| and tries to retrieve
// a node. Returns nullptr if the category is empty.
FreeSpace* FindNodeIn(FreeListCategoryType type, size_t minimum_size,
size_t* node_size);
FreeSpace FindNodeIn(FreeListCategoryType type, size_t minimum_size,
size_t* node_size);
// Tries to retrieve a node from the first category in a given |type|.
// Returns nullptr if the category is empty or the top entry is smaller
// than minimum_size.
FreeSpace* TryFindNodeIn(FreeListCategoryType type, size_t minimum_size,
size_t* node_size);
FreeSpace TryFindNodeIn(FreeListCategoryType type, size_t minimum_size,
size_t* node_size);
// Searches a given |type| for a node of at least |minimum_size|.
FreeSpace* SearchForNodeInList(FreeListCategoryType type, size_t* node_size,
size_t minimum_size);
FreeSpace SearchForNodeInList(FreeListCategoryType type, size_t* node_size,
size_t minimum_size);
// The tiny categories are not used for fast allocation.
FreeListCategoryType SelectFastAllocationFreeListCategoryType(
......
......@@ -1046,7 +1046,7 @@ int HeapObject::SizeFromMap(Map map) const {
BytecodeArray::unchecked_cast(this)->synchronized_length());
}
if (instance_type == FREE_SPACE_TYPE) {
return reinterpret_cast<const FreeSpace*>(this)->relaxed_read_size();
return FreeSpace::unchecked_cast(this)->relaxed_read_size();
}
if (instance_type == STRING_TYPE ||
instance_type == INTERNALIZED_STRING_TYPE) {
......
......@@ -15,12 +15,14 @@
namespace v8 {
namespace internal {
OBJECT_CONSTRUCTORS_IMPL(FreeSpace, HeapObjectPtr)
SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
RELAXED_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
int FreeSpace::Size() { return size(); }
FreeSpace* FreeSpace::next() {
FreeSpace FreeSpace::next() {
#ifdef DEBUG
Heap* heap = Heap::FromWritableHeapObject(this);
Object* free_space_map = heap->isolate()->root(RootIndex::kFreeSpaceMap);
......@@ -29,10 +31,10 @@ FreeSpace* FreeSpace::next() {
map_slot().contains_value(kNullAddress));
#endif
DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
return reinterpret_cast<FreeSpace*>(Memory<Address>(address() + kNextOffset));
return FreeSpace::unchecked_cast(*ObjectSlot(address() + kNextOffset));
}
void FreeSpace::set_next(FreeSpace* next) {
void FreeSpace::set_next(FreeSpace next) {
#ifdef DEBUG
Heap* heap = Heap::FromWritableHeapObject(this);
Object* free_space_map = heap->isolate()->root(RootIndex::kFreeSpaceMap);
......@@ -41,15 +43,17 @@ void FreeSpace::set_next(FreeSpace* next) {
map_slot().contains_value(kNullAddress));
#endif
DCHECK_LE(kNextOffset + kPointerSize, relaxed_read_size());
base::Relaxed_Store(
reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
reinterpret_cast<base::AtomicWord>(next));
ObjectSlot(address() + kNextOffset).Relaxed_Store(next);
}
// Checked cast from a HeapObject* to a FreeSpace value handle. The
// SLOW_DCHECK tolerates non-FreeSpace maps during deserialization, when
// map words may not yet be set up.
FreeSpace FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!Heap::FromWritableHeapObject(o)->deserialization_complete() ||
              o->IsFreeSpace());
  // bit_cast via ObjectPtr reinterprets the raw address as the new
  // value-typed FreeSpace handle.
  return bit_cast<FreeSpace>(ObjectPtr(o->ptr()));
}
// Unchecked cast: reinterprets the object's raw address as a FreeSpace
// value handle with no map/type verification (unlike cast() above).
FreeSpace FreeSpace::unchecked_cast(const Object* o) {
return bit_cast<FreeSpace>(ObjectPtr(o->ptr()));
}
} // namespace internal
......
......@@ -5,7 +5,7 @@
#ifndef V8_OBJECTS_FREE_SPACE_H_
#define V8_OBJECTS_FREE_SPACE_H_
#include "src/objects.h"
#include "src/objects/heap-object.h"
// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"
......@@ -18,7 +18,7 @@ namespace internal {
// the heap remains iterable. They have a size and a next pointer.
// The next pointer is the raw address of the next FreeSpace object (or NULL)
// in the free list.
class FreeSpace : public HeapObject {
class FreeSpace : public HeapObjectPtr {
public:
// [size]: size of the free space including the header.
inline int size() const;
......@@ -30,10 +30,11 @@ class FreeSpace : public HeapObject {
inline int Size();
// Accessors for the next field.
inline FreeSpace* next();
inline void set_next(FreeSpace* next);
inline FreeSpace next();
inline void set_next(FreeSpace next);
inline static FreeSpace* cast(HeapObject* obj);
inline static FreeSpace cast(HeapObject* obj);
inline static FreeSpace unchecked_cast(const Object* obj);
// Dispatched behavior.
DECL_PRINTER(FreeSpace)
......@@ -45,8 +46,7 @@ class FreeSpace : public HeapObject {
static const int kNextOffset = POINTER_SIZE_ALIGN(kSizeOffset + kPointerSize);
static const int kSize = kNextOffset + kPointerSize;
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(FreeSpace);
OBJECT_CONSTRUCTORS(FreeSpace, HeapObjectPtr);
};
} // namespace internal
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment