Commit 8abada5b authored by Michael Lippautz, committed by Commit Bot

[iwyu] Untangle heap/local-allocator(-inl).h

Bug: v8:7490
Change-Id: I72444df6f75bc61c467c2df51f87581ef44fe09e
Reviewed-on: https://chromium-review.googlesource.com/1151632
Commit-Queue: Michael Lippautz <mlippautz@chromium.org>
Reviewed-by: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54751}
parent d13e5d57
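
This change applies the usual V8 convention for include-what-you-use (IWYU) cleanups: foo.h keeps only declarations and stays cheap to include widely, while foo-inl.h holds the inline definitions and is included only by files that actually call them. As a generic sketch of the pattern (illustrative names, not code from this commit):

// thing.h -- declarations only; cheap and safe to include widely.
#ifndef THING_H_
#define THING_H_

class Thing {
 public:
  inline int Size() const;  // declared here, defined in thing-inl.h

 private:
  int size_ = 0;
};

#endif  // THING_H_

// thing-inl.h -- inline definitions; include only where Size() is called.
#ifndef THING_INL_H_
#define THING_INL_H_

#include "thing.h"

inline int Thing::Size() const { return size_; }

#endif  // THING_INL_H_

Every hunk below is an instance of this split: the bodies move from local-allocator.h into the new local-allocator-inl.h, and the users that call the inline members switch to the -inl.h include.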
BUILD.gn:
@@ -1983,6 +1983,7 @@ v8_source_set("v8_base") {
     "src/heap/invalidated-slots.h",
     "src/heap/item-parallel-job.cc",
     "src/heap/item-parallel-job.h",
+    "src/heap/local-allocator-inl.h",
     "src/heap/local-allocator.h",
     "src/heap/mark-compact-inl.h",
     "src/heap/mark-compact.cc",
src/heap/local-allocator-inl.h (new file):

// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_LOCAL_ALLOCATOR_INL_H_
#define V8_HEAP_LOCAL_ALLOCATOR_INL_H_

#include "src/heap/local-allocator.h"

#include "src/heap/spaces-inl.h"

namespace v8 {
namespace internal {

AllocationResult LocalAllocator::Allocate(AllocationSpace space,
                                          int object_size,
                                          AllocationAlignment alignment) {
  switch (space) {
    case NEW_SPACE:
      return AllocateInNewSpace(object_size, alignment);
    case OLD_SPACE:
      return compaction_spaces_.Get(OLD_SPACE)->AllocateRaw(object_size,
                                                            alignment);
    case CODE_SPACE:
      return compaction_spaces_.Get(CODE_SPACE)
          ->AllocateRaw(object_size, alignment);
    default:
      UNREACHABLE();
      break;
  }
}

void LocalAllocator::FreeLast(AllocationSpace space, HeapObject* object,
                              int object_size) {
  switch (space) {
    case NEW_SPACE:
      FreeLastInNewSpace(object, object_size);
      return;
    case OLD_SPACE:
      FreeLastInOldSpace(object, object_size);
      return;
    default:
      // Only new and old space supported.
      UNREACHABLE();
      break;
  }
}

void LocalAllocator::FreeLastInNewSpace(HeapObject* object, int object_size) {
  if (!new_space_lab_.TryFreeLast(object, object_size)) {
    // We couldn't free the last object so we have to write a proper filler.
    heap_->CreateFillerObjectAt(object->address(), object_size,
                                ClearRecordedSlots::kNo);
  }
}

void LocalAllocator::FreeLastInOldSpace(HeapObject* object, int object_size) {
  if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
    // We couldn't free the last object so we have to write a proper filler.
    heap_->CreateFillerObjectAt(object->address(), object_size,
                                ClearRecordedSlots::kNo);
  }
}

AllocationResult LocalAllocator::AllocateInLAB(int object_size,
                                               AllocationAlignment alignment) {
  AllocationResult allocation;
  if (!new_space_lab_.IsValid() && !NewLocalAllocationBuffer()) {
    return AllocationResult::Retry(OLD_SPACE);
  }
  allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
  if (allocation.IsRetry()) {
    if (!NewLocalAllocationBuffer()) {
      return AllocationResult::Retry(OLD_SPACE);
    } else {
      allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
      CHECK(!allocation.IsRetry());
    }
  }
  return allocation;
}

bool LocalAllocator::NewLocalAllocationBuffer() {
  if (lab_allocation_will_fail_) return false;
  LocalAllocationBuffer saved_lab_ = new_space_lab_;
  AllocationResult result =
      new_space_->AllocateRawSynchronized(kLabSize, kWordAligned);
  new_space_lab_ = LocalAllocationBuffer::FromResult(heap_, result, kLabSize);
  if (new_space_lab_.IsValid()) {
    new_space_lab_.TryMerge(&saved_lab_);
    return true;
  }
  new_space_lab_ = saved_lab_;
  lab_allocation_will_fail_ = true;
  return false;
}

AllocationResult LocalAllocator::AllocateInNewSpace(
    int object_size, AllocationAlignment alignment) {
  if (object_size > kMaxLabObjectSize) {
    return new_space_->AllocateRawSynchronized(object_size, alignment);
  }
  return AllocateInLAB(object_size, alignment);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_LOCAL_ALLOCATOR_INL_H_
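
A note on the allocation logic above: AllocateInLAB() bump-allocates from a thread-local allocation buffer (LAB), refilling it at most once via NewLocalAllocationBuffer() before giving up and returning Retry(OLD_SPACE). A minimal self-contained sketch of that discipline (simplified stand-in types, not V8 code):

#include <cstddef>
#include <cstdlib>

// Simplified stand-in for V8's LocalAllocationBuffer: a thread-local
// bump-pointer region carved out of a shared space.
class Lab {
 public:
  static constexpr size_t kLabSize = 32 * 1024;

  // Tries to bump-allocate; returns nullptr when the buffer is exhausted.
  void* Allocate(size_t size) {
    if (block_ == nullptr || used_ + size > kLabSize) return nullptr;
    void* result = block_ + used_;
    used_ += size;
    return result;
  }

  // Grabs a fresh buffer. In V8 this is a synchronized allocation from
  // new space (AllocateRawSynchronized); the old block stays owned by
  // the space, so abandoning it here mirrors that, not a leak fix.
  bool Refill() {
    char* block = static_cast<char*>(std::malloc(kLabSize));
    if (block == nullptr) return false;
    block_ = block;
    used_ = 0;
    return true;
  }

 private:
  char* block_ = nullptr;
  size_t used_ = 0;
};

// Mirrors the retry structure of LocalAllocator::AllocateInLAB: one
// refill attempt, after which the allocation must succeed or the caller
// falls back (V8 returns AllocationResult::Retry(OLD_SPACE)).
void* AllocateInLab(Lab* lab, size_t size) {
  void* result = lab->Allocate(size);
  if (result == nullptr) {
    if (!lab->Refill()) return nullptr;  // fall back to the slow path
    result = lab->Allocate(size);        // must fit in a fresh LAB
  }
  return result;
}

The single-refill structure is what justifies the CHECK(!allocation.IsRetry()) above: AllocateInNewSpace() only routes objects with object_size <= kMaxLabObjectSize into the LAB, so an allocation that reaches a freshly allocated, empty LAB cannot fail.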
src/heap/local-allocator.h:
@@ -41,95 +41,19 @@ class LocalAllocator {
     }
   }
 
-  AllocationResult Allocate(AllocationSpace space, int object_size,
-                            AllocationAlignment alignment) {
-    switch (space) {
-      case NEW_SPACE:
-        return AllocateInNewSpace(object_size, alignment);
-      case OLD_SPACE:
-        return compaction_spaces_.Get(OLD_SPACE)->AllocateRaw(object_size,
-                                                              alignment);
-      case CODE_SPACE:
-        return compaction_spaces_.Get(CODE_SPACE)
-            ->AllocateRaw(object_size, alignment);
-      default:
-        UNREACHABLE();
-        break;
-    }
-  }
-
-  void FreeLast(AllocationSpace space, HeapObject* object, int object_size) {
-    switch (space) {
-      case NEW_SPACE:
-        FreeLastInNewSpace(object, object_size);
-        return;
-      case OLD_SPACE:
-        FreeLastInOldSpace(object, object_size);
-        return;
-      default:
-        // Only new and old space supported.
-        UNREACHABLE();
-        break;
-    }
-  }
+  inline AllocationResult Allocate(AllocationSpace space, int object_size,
+                                   AllocationAlignment alignment);
+  inline void FreeLast(AllocationSpace space, HeapObject* object,
+                       int object_size);
 
  private:
-  AllocationResult AllocateInNewSpace(int object_size,
-                                      AllocationAlignment alignment) {
-    if (object_size > kMaxLabObjectSize) {
-      return new_space_->AllocateRawSynchronized(object_size, alignment);
-    }
-    return AllocateInLAB(object_size, alignment);
-  }
-
-  inline bool NewLocalAllocationBuffer() {
-    if (lab_allocation_will_fail_) return false;
-    LocalAllocationBuffer saved_lab_ = new_space_lab_;
-    AllocationResult result =
-        new_space_->AllocateRawSynchronized(kLabSize, kWordAligned);
-    new_space_lab_ = LocalAllocationBuffer::FromResult(heap_, result, kLabSize);
-    if (new_space_lab_.IsValid()) {
-      new_space_lab_.TryMerge(&saved_lab_);
-      return true;
-    }
-    new_space_lab_ = saved_lab_;
-    lab_allocation_will_fail_ = true;
-    return false;
-  }
-
-  AllocationResult AllocateInLAB(int object_size,
-                                 AllocationAlignment alignment) {
-    AllocationResult allocation;
-    if (!new_space_lab_.IsValid() && !NewLocalAllocationBuffer()) {
-      return AllocationResult::Retry(OLD_SPACE);
-    }
-    allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
-    if (allocation.IsRetry()) {
-      if (!NewLocalAllocationBuffer()) {
-        return AllocationResult::Retry(OLD_SPACE);
-      } else {
-        allocation = new_space_lab_.AllocateRawAligned(object_size, alignment);
-        CHECK(!allocation.IsRetry());
-      }
-    }
-    return allocation;
-  }
-
-  void FreeLastInNewSpace(HeapObject* object, int object_size) {
-    if (!new_space_lab_.TryFreeLast(object, object_size)) {
-      // We couldn't free the last object so we have to write a proper filler.
-      heap_->CreateFillerObjectAt(object->address(), object_size,
-                                  ClearRecordedSlots::kNo);
-    }
-  }
-
-  void FreeLastInOldSpace(HeapObject* object, int object_size) {
-    if (!compaction_spaces_.Get(OLD_SPACE)->TryFreeLast(object, object_size)) {
-      // We couldn't free the last object so we have to write a proper filler.
-      heap_->CreateFillerObjectAt(object->address(), object_size,
-                                  ClearRecordedSlots::kNo);
-    }
-  }
+  inline AllocationResult AllocateInNewSpace(int object_size,
+                                             AllocationAlignment alignment);
+  inline bool NewLocalAllocationBuffer();
+  inline AllocationResult AllocateInLAB(int object_size,
+                                        AllocationAlignment alignment);
+  inline void FreeLastInNewSpace(HeapObject* object, int object_size);
+  inline void FreeLastInOldSpace(HeapObject* object, int object_size);
 
   Heap* const heap_;
   NewSpace* const new_space_;
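
The header now only declares the hot-path members, which changes the contract for includers: a translation unit that merely holds a LocalAllocator can keep including local-allocator.h, but one that calls Allocate() or FreeLast() must include local-allocator-inl.h (as the include swap in the following hunk shows), or the inline definitions are never seen in that TU and the link fails. A sketch with a hypothetical caller (not part of this commit):

// evacuation-helper.cc -- hypothetical translation unit, for illustration.
#include "src/heap/local-allocator-inl.h"  // local-allocator.h alone would
                                           // leave Allocate() undefined here.

namespace v8 {
namespace internal {

AllocationResult AllocateDuringEvacuation(LocalAllocator* allocator,
                                          int object_size) {
  // Uses the inline LocalAllocator::Allocate defined in local-allocator-inl.h.
  return allocator->Allocate(OLD_SPACE, object_size, kWordAligned);
}

}  // namespace internal
}  // namespace v8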
src/heap/mark-compact.cc:
@@ -20,7 +20,7 @@
 #include "src/heap/incremental-marking.h"
 #include "src/heap/invalidated-slots-inl.h"
 #include "src/heap/item-parallel-job.h"
-#include "src/heap/local-allocator.h"
+#include "src/heap/local-allocator-inl.h"
 #include "src/heap/mark-compact-inl.h"
 #include "src/heap/object-stats.h"
 #include "src/heap/objects-visiting-inl.h"
src/heap/scavenger-inl.h:
@@ -6,6 +6,8 @@
 #define V8_HEAP_SCAVENGER_INL_H_
 
 #include "src/heap/scavenger.h"
 
+#include "src/heap/local-allocator-inl.h"
+
 #include "src/objects-inl.h"
 #include "src/objects/map.h"
src/heap/spaces-inl.h:
@@ -468,13 +468,6 @@ V8_WARN_UNUSED_RESULT inline AllocationResult NewSpace::AllocateRawSynchronized(
   return AllocateRaw(size_in_bytes, alignment);
 }
 
-LocalAllocationBuffer LocalAllocationBuffer::InvalidBuffer() {
-  return LocalAllocationBuffer(
-      nullptr, LinearAllocationArea(kNullAddress, kNullAddress));
-}
-
 LocalAllocationBuffer LocalAllocationBuffer::FromResult(Heap* heap,
                                                         AllocationResult result,
                                                         intptr_t size) {
src/heap/spaces.h:
@@ -1979,7 +1979,10 @@ class LocalAllocationBuffer {
   // Indicates that a buffer cannot be used for allocations anymore. Can result
   // from either reassigning a buffer, or trying to construct it from an
   // invalid {AllocationResult}.
-  static inline LocalAllocationBuffer InvalidBuffer();
+  static LocalAllocationBuffer InvalidBuffer() {
+    return LocalAllocationBuffer(
+        nullptr, LinearAllocationArea(kNullAddress, kNullAddress));
+  }
 
   // Creates a new LAB from a given {AllocationResult}. Results in
   // InvalidBuffer if the result indicates a retry.
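
Moving InvalidBuffer() from spaces-inl.h into the class body makes it implicitly inline and visible to every includer of spaces.h, which is presumably what lets local-allocator.h construct an invalid LAB without dragging in spaces-inl.h. The same trade in a generic, self-contained form (not V8 code):

// buffer.h -- generic illustration of an in-class factory definition.
#ifndef BUFFER_H_
#define BUFFER_H_

class Buffer {
 public:
  // Defined in the class body: implicitly inline, so any file including
  // buffer.h can call it -- no buffer-inl.h needed for this one member.
  static Buffer Invalid() { return Buffer(nullptr, 0); }

  bool IsValid() const { return start_ != nullptr; }

 private:
  Buffer(char* start, int size) : start_(start), size_(size) {}

  char* start_;
  int size_;
};

#endif  // BUFFER_H_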
@@ -49,7 +49,6 @@ AUTO_EXCLUDE = [
     'src/field-type.h',
     'src/heap/incremental-marking.h',
     'src/heap/incremental-marking-inl.h',
-    'src/heap/local-allocator.h',
     'src/heap/mark-compact.h',
     'src/heap/objects-visiting.h',
     'src/heap/scavenger.h',