Commit 69621ef0 authored by Igor Sheludko, committed by Commit Bot

[cleanup] Introduce base::AddressRegion helper class

Bug: v8:8015
Cq-Include-Trybots: luci.chromium.try:linux_chromium_rel_ng
Change-Id: I2ce078b662e3dd93e0fac310b0d73c4cadbaccb3
Reviewed-on: https://chromium-review.googlesource.com/1226640
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#55957}
parent 2c97e145
--- a/BUILD.gn
+++ b/BUILD.gn
@@ -3008,6 +3008,7 @@ v8_source_set("torque_base") {
 v8_component("v8_libbase") {
   sources = [
     "src/base/adapters.h",
+    "src/base/address-region.h",
     "src/base/atomic-utils.h",
     "src/base/atomicops.h",
     "src/base/atomicops_internals_atomicword_compat.h",
...
--- a/src/allocation.cc
+++ b/src/allocation.cc
@@ -212,10 +212,10 @@ VirtualMemory::VirtualMemory(v8::PageAllocator* page_allocator, size_t size,
   size_t page_size = page_allocator_->AllocatePageSize();
   alignment = RoundUp(alignment, page_size);
   size = RoundUp(size, page_size);
-  address_ = reinterpret_cast<Address>(AllocatePages(
-      page_allocator_, hint, size, alignment, PageAllocator::kNoAccess));
-  if (address_ != kNullAddress) {
-    size_ = size;
+  Address address = reinterpret_cast<Address>(AllocatePages(
+      page_allocator_, hint, size, alignment, PageAllocator::kNoAccess));
+  if (address != kNullAddress) {
+    region_ = base::AddressRegion(address, size);
   }
 }
@@ -227,8 +227,7 @@ VirtualMemory::~VirtualMemory() {
 void VirtualMemory::Reset() {
   page_allocator_ = nullptr;
-  address_ = kNullAddress;
-  size_ = 0;
+  region_ = base::AddressRegion();
 }

 bool VirtualMemory::SetPermissions(Address address, size_t size,
@@ -245,14 +244,13 @@ size_t VirtualMemory::Release(Address free_start) {
   DCHECK(IsAddressAligned(free_start, page_allocator_->CommitPageSize()));
   // Notice: Order is important here. The VirtualMemory object might live
   // inside the allocated region.
-  const size_t free_size = size_ - (free_start - address_);
-  size_t old_size = size_;
+  const size_t old_size = region_.size();
+  const size_t free_size = old_size - (free_start - region_.begin());
   CHECK(InVM(free_start, free_size));
-  DCHECK_LT(address_, free_start);
-  DCHECK_LT(free_start, address_ + size_);
-  size_ -= free_size;
-  CHECK(ReleasePages(page_allocator_, reinterpret_cast<void*>(address_),
-                     old_size, size_));
+  region_.set_size(old_size - free_size);
+  CHECK(ReleasePages(page_allocator_, reinterpret_cast<void*>(region_.begin()),
+                     old_size, region_.size()));
   return free_size;
 }
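For a concrete reading of the new trimming arithmetic (hypothetical values, not from the CL): if region_ covers [0x10000, 0x14000) and Release(0x12000) is called, then old_size = 0x4000, free_size = 0x4000 - (0x12000 - 0x10000) = 0x2000, region_ shrinks to [0x10000, 0x12000), and ReleasePages() returns the 0x2000-byte tail to the OS while the return value reports the freed size.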
@@ -261,21 +259,18 @@ void VirtualMemory::Free() {
   // Notice: Order is important here. The VirtualMemory object might live
   // inside the allocated region.
   v8::PageAllocator* page_allocator = page_allocator_;
-  Address address = address_;
-  size_t size = size_;
-  CHECK(InVM(address, size));
+  base::AddressRegion region = region_;
   Reset();
-  // FreePages expects size to be aligned to allocation granularity. Trimming
-  // may leave size at only commit granularity. Align it here.
-  CHECK(FreePages(page_allocator, reinterpret_cast<void*>(address),
-                  RoundUp(size, page_allocator->AllocatePageSize())));
+  // FreePages expects size to be aligned to allocation granularity however
+  // ReleasePages may leave size at only commit granularity. Align it here.
+  CHECK(FreePages(page_allocator, reinterpret_cast<void*>(region.begin()),
+                  RoundUp(region.size(), page_allocator->AllocatePageSize())));
 }

 void VirtualMemory::TakeControl(VirtualMemory* from) {
   DCHECK(!IsReserved());
   page_allocator_ = from->page_allocator_;
-  address_ = from->address_;
-  size_ = from->size_;
+  region_ = from->region_;
   from->Reset();
 }
...
--- a/src/allocation.h
+++ b/src/allocation.h
@@ -6,6 +6,7 @@
 #define V8_ALLOCATION_H_

 #include "include/v8-platform.h"
+#include "src/base/address-region.h"
 #include "src/base/compiler-specific.h"
 #include "src/base/platform/platform.h"
 #include "src/globals.h"
@@ -167,7 +168,7 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
   // Construct a virtual memory by assigning it some already mapped address
   // and size.
   VirtualMemory(v8::PageAllocator* page_allocator, Address address, size_t size)
-      : page_allocator_(page_allocator), address_(address), size_(size) {
+      : page_allocator_(page_allocator), region_(address, size) {
     DCHECK_NOT_NULL(page_allocator);
   }
@@ -185,32 +186,34 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
   }

   // Returns whether the memory has been reserved.
-  bool IsReserved() const { return address_ != kNullAddress; }
+  bool IsReserved() const { return region_.begin() != kNullAddress; }

   // Initialize or resets an embedded VirtualMemory object.
   void Reset();

   v8::PageAllocator* page_allocator() { return page_allocator_; }

+  const base::AddressRegion& region() const { return region_; }
+
   // Returns the start address of the reserved memory.
   // If the memory was reserved with an alignment, this address is not
   // necessarily aligned. The user might need to round it up to a multiple of
   // the alignment to get the start of the aligned block.
   Address address() const {
     DCHECK(IsReserved());
-    return address_;
+    return region_.begin();
   }

   Address end() const {
     DCHECK(IsReserved());
-    return address_ + size_;
+    return region_.end();
   }

   // Returns the size of the reserved memory. The returned value is only
   // meaningful when IsReserved() returns true.
   // If the memory was reserved with an alignment, this size may be larger
   // than the requested size.
-  size_t size() const { return size_; }
+  size_t size() const { return region_.size(); }

   // Sets permissions according to the access argument. address and size must be
   // multiples of CommitPageSize(). Returns true on success, otherwise false.
@@ -228,14 +231,13 @@ class V8_EXPORT_PRIVATE VirtualMemory final {
   void TakeControl(VirtualMemory* from);

   bool InVM(Address address, size_t size) {
-    return (address_ <= address) && ((address_ + size_) >= (address + size));
+    return region_.contains(address, size);
   }

  private:
   // Page allocator that controls the virtual memory.
   v8::PageAllocator* page_allocator_ = nullptr;
-  Address address_ = kNullAddress;  // Start address of the virtual memory.
-  size_t size_ = 0;                 // Size of the virtual memory.
+  base::AddressRegion region_;

   DISALLOW_COPY_AND_ASSIGN(VirtualMemory);
 };
...
--- a/src/api.cc
+++ b/src/api.cc
@@ -8707,14 +8707,10 @@ void Isolate::SetStackLimit(uintptr_t stack_limit) {
 void Isolate::GetCodeRange(void** start, size_t* length_in_bytes) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
-  i::MemoryAllocator* memory_allocator = isolate->heap()->memory_allocator();
-  if (memory_allocator->code_range_valid()) {
-    *start = reinterpret_cast<void*>(memory_allocator->code_range_start());
-    *length_in_bytes = memory_allocator->code_range_size();
-  } else {
-    *start = nullptr;
-    *length_in_bytes = 0;
-  }
+  const base::AddressRegion& code_range =
+      isolate->heap()->memory_allocator()->code_range();
+  *start = reinterpret_cast<void*>(code_range.begin());
+  *length_in_bytes = code_range.size();
 }

 MemoryRange Isolate::GetBuiltinsCodeRange() {
...
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -74,9 +74,12 @@ AssemblerOptions AssemblerOptions::Default(
   options.enable_simulator_code = !serializer;
 #endif
   options.inline_offheap_trampolines = !serializer;
 #if V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_ARM64
-  options.code_range_start =
-      isolate->heap()->memory_allocator()->code_range_start();
+  const base::AddressRegion& code_range =
+      isolate->heap()->memory_allocator()->code_range();
+  DCHECK_IMPLIES(code_range.begin() != kNullAddress, !code_range.is_empty());
+  options.code_range_start = code_range.begin();
 #endif
   return options;
 }
...
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_BASE_ADDRESS_REGION_H_
#define V8_BASE_ADDRESS_REGION_H_

#include <type_traits>

#include "src/base/macros.h"

namespace v8 {
namespace base {

// Helper class representing an address region of certain size.
class AddressRegion {
 public:
  typedef uintptr_t Address;

  AddressRegion() = default;

  AddressRegion(Address address, size_t size)
      : address_(address), size_(size) {}

  Address begin() const { return address_; }
  Address end() const { return address_ + size_; }

  size_t size() const { return size_; }
  void set_size(size_t size) { size_ = size; }

  bool is_empty() const { return size_ == 0; }

  bool contains(Address address) const {
    STATIC_ASSERT(std::is_unsigned<Address>::value);
    return (address - begin()) < size();
  }

  bool contains(Address address, size_t size) const {
    STATIC_ASSERT(std::is_unsigned<Address>::value);
    Address offset = address - begin();
    return (offset < size_) && (offset <= size_ - size);
  }

  bool contains(const AddressRegion& region) const {
    return contains(region.address_, region.size_);
  }

 private:
  Address address_ = 0;
  size_t size_ = 0;
};

}  // namespace base
}  // namespace v8

#endif  // V8_BASE_ADDRESS_REGION_H_
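As a quick orientation, a minimal usage sketch of the new helper (not part of the CL; it assumes a V8 checkout with "src/base/address-region.h" on the include path, and all addresses and sizes are invented for illustration):

// Usage sketch for base::AddressRegion (illustrative values only).
#include <cassert>

#include "src/base/address-region.h"

int main() {
  using v8::base::AddressRegion;

  // A hypothetical 16 KiB reservation starting at 0x10000.
  AddressRegion region(0x10000, 0x4000);
  assert(region.begin() == 0x10000);
  assert(region.end() == 0x14000);  // end() is exclusive

  // One-argument contains() has half-open [begin, end) semantics. Because
  // the implementation subtracts begin() with unsigned arithmetic, addresses
  // below begin() wrap around to huge offsets and are rejected as well.
  assert(region.contains(0x10000));
  assert(!region.contains(0x14000));
  assert(!region.contains(0xFFFF));

  // Two-argument contains() requires the whole [address, address + size)
  // range to fit inside the region.
  assert(region.contains(0x12000, 0x2000));
  assert(!region.contains(0x12000, 0x2001));

  // Trimming the tail, as VirtualMemory::Release() now does via set_size().
  region.set_size(0x2000);
  assert(region.end() == 0x12000);
  return 0;
}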
--- a/src/base/region-allocator.h
+++ b/src/base/region-allocator.h
@@ -7,6 +7,7 @@
 #include <set>

+#include "src/base/address-region.h"
 #include "src/base/utils/random-number-generator.h"
 #include "testing/gtest/include/gtest/gtest_prod.h"  // nogncheck
@@ -75,36 +76,17 @@ class V8_BASE_EXPORT RegionAllocator final {
   void Print(std::ostream& os) const;

  private:
-  class Region {
+  class Region : public AddressRegion {
    public:
-    Address begin() const { return address_; }
-    Address end() const { return address_ + size_; }
-    size_t size() const { return size_; }
-    void set_size(size_t size) { size_ = size; }
-
-    bool contains(Address address) const {
-      STATIC_ASSERT(std::is_unsigned<Address>::value);
-      return (address - begin()) < size();
-    }
-
-    bool contains(Address address, size_t size) const {
-      STATIC_ASSERT(std::is_unsigned<Address>::value);
-      Address offset = address - begin();
-      return (offset < size_) && (offset <= size_ - size);
-    }
+    Region(Address address, size_t size, bool is_used)
+        : AddressRegion(address, size), is_used_(is_used) {}

     bool is_used() const { return is_used_; }
     void set_is_used(bool used) { is_used_ = used; }

-    Region(Address address, size_t size, bool is_used)
-        : address_(address), size_(size), is_used_(is_used) {}
-
     void Print(std::ostream& os) const;

    private:
-    Address address_;
-    size_t size_;
     bool is_used_;
   };
...
--- a/src/builtins/setup-builtins-internal.cc
+++ b/src/builtins/setup-builtins-internal.cc
@@ -49,10 +49,11 @@ AssemblerOptions BuiltinAssemblerOptions(Isolate* isolate,
     return options;
   }

+  const base::AddressRegion& code_range =
+      isolate->heap()->memory_allocator()->code_range();
   bool pc_relative_calls_fit_in_code_range =
-      isolate->heap()->memory_allocator()->code_range_valid() &&
-      isolate->heap()->memory_allocator()->code_range_size() <=
-          kMaxPCRelativeCodeRangeInMB * MB;
+      !code_range.is_empty() &&
+      code_range.size() <= kMaxPCRelativeCodeRangeInMB * MB;

   options.isolate_independent_code = true;
   options.use_pc_relative_calls_and_jumps = pc_relative_calls_fit_in_code_range;
...
--- a/src/heap/factory.cc
+++ b/src/heap/factory.cc
@@ -64,9 +64,9 @@ void InitializeCode(Heap* heap, Handle<Code> code, int object_size,
                     bool is_turbofanned, int stack_slots,
                     int safepoint_table_offset, int handler_table_offset) {
   DCHECK(IsAligned(code->address(), kCodeAlignment));
-  DCHECK(!heap->memory_allocator()->code_range_valid() ||
-         heap->memory_allocator()->code_range_contains(code->address()) ||
-         object_size <= heap->code_space()->AreaSize());
+  DCHECK_IMPLIES(
+      !heap->memory_allocator()->code_range().is_empty(),
+      heap->memory_allocator()->code_range().contains(code->address()));

   bool has_unwinding_info = desc.unwinding_info != nullptr;
@@ -2674,9 +2674,9 @@ Handle<Code> Factory::NewCodeForDeserialization(uint32_t size) {
   heap->ZapCodeObject(result->address(), size);
   result->set_map_after_allocation(*code_map(), SKIP_WRITE_BARRIER);
   DCHECK(IsAligned(result->address(), kCodeAlignment));
-  DCHECK(!heap->memory_allocator()->code_range_valid() ||
-         heap->memory_allocator()->code_range_contains(result->address()) ||
-         static_cast<int>(size) <= heap->code_space()->AreaSize());
+  DCHECK_IMPLIES(
+      !heap->memory_allocator()->code_range().is_empty(),
+      heap->memory_allocator()->code_range().contains(result->address()));
   return handle(Code::cast(result), isolate());
 }
@@ -2738,9 +2738,9 @@ Handle<Code> Factory::CopyCode(Handle<Code> code) {
   if (FLAG_verify_heap) new_code->ObjectVerify(isolate());
 #endif
   DCHECK(IsAligned(new_code->address(), kCodeAlignment));
-  DCHECK(!heap->memory_allocator()->code_range_valid() ||
-         heap->memory_allocator()->code_range_contains(new_code->address()) ||
-         obj_size <= heap->code_space()->AreaSize());
+  DCHECK_IMPLIES(
+      !heap->memory_allocator()->code_range().is_empty(),
+      heap->memory_allocator()->code_range().contains(new_code->address()));
   return new_code;
 }
...
--- a/src/heap/spaces-inl.h
+++ b/src/heap/spaces-inl.h
@@ -546,32 +546,6 @@ bool LocalAllocationBuffer::TryFreeLast(HeapObject* object, int object_size) {
   return false;
 }

-// -----------------------------------------------------------------------------
-// MemoryAllocator
-
-bool MemoryAllocator::code_range_valid() const {
-  return code_page_allocator_instance_.get() != nullptr;
-}
-
-Address MemoryAllocator::code_range_start() const {
-  DCHECK(code_range_valid());
-  // TODO(ishell): once a follow-up CL is landed add assert that
-  // |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|
-  return code_range_start_;
-}
-
-size_t MemoryAllocator::code_range_size() const {
-  DCHECK(code_range_valid());
-  // TODO(ishell): once a follow-up CL is landed add assert that
-  // |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|
-  return code_range_size_;
-}
-
-bool MemoryAllocator::code_range_contains(Address address) const {
-  DCHECK(code_range_valid());
-  return (address - code_range_start_) < code_range_size_;
-}
-
 }  // namespace internal
 }  // namespace v8
...
--- a/src/heap/spaces.cc
+++ b/src/heap/spaces.cc
@@ -123,8 +123,6 @@ MemoryAllocator::MemoryAllocator(Isolate* isolate, size_t capacity,
     : isolate_(isolate),
       data_page_allocator_(GetPlatformPageAllocator()),
       code_page_allocator_(nullptr),
-      code_range_start_(kNullAddress),
-      code_range_size_(0),
       capacity_(RoundUp(capacity, Page::kPageSize)),
       size_(0),
       size_executable_(0),
@@ -168,8 +166,7 @@ void MemoryAllocator::InitializeCodePageAllocator(
     V8::FatalProcessOutOfMemory(isolate_,
                                 "CodeRange setup: allocate virtual memory");
   }
-  code_range_start_ = reservation.address();
-  code_range_size_ = reservation.size();
+  code_range_ = reservation.region();

   // We are sure that we have mapped a block of requested addresses.
   DCHECK_GE(reservation.size(), requested);
...
--- a/src/heap/spaces.h
+++ b/src/heap/spaces.h
@@ -1389,10 +1389,16 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
                : data_page_allocator_;
   }

-  V8_INLINE bool code_range_valid() const;
-  V8_INLINE Address code_range_start() const;
-  V8_INLINE size_t code_range_size() const;
-  V8_INLINE bool code_range_contains(Address address) const;
+  // A region of memory that may contain executable code including reserved
+  // OS page with read-write access in the beginning.
+  const base::AddressRegion& code_range() const {
+    // |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|
+    DCHECK_IMPLIES(!code_range_.is_empty(), code_page_allocator_instance_);
+    DCHECK_IMPLIES(!code_range_.is_empty(),
+                   code_range_.contains(code_page_allocator_instance_->begin(),
+                                        code_page_allocator_instance_->size()));
+    return code_range_;
+  }

   Unmapper* unmapper() { return &unmapper_; }
@@ -1472,14 +1478,12 @@ class V8_EXPORT_PRIVATE MemoryAllocator {
   // A part of the |heap_reservation_| that may contain executable code
   // including reserved page with read-write access in the beginning.
   // See details below.
-  // TODO(ishell): introduce base::AddressRange code_range_; instead.
-  Address code_range_start_;
-  size_t code_range_size_;
+  base::AddressRegion code_range_;

   // This unique pointer owns the instance of bounded code allocator
   // that controls executable pages allocation. It does not control the
   // optionally existing page in the beginning of the |code_range_|.
-  // So, summarizing all above, the following condition holds:
+  // So, summarizing all above, the following conditions hold:
   // 1) |heap_reservation_| >= |code_range_|
   // 2) |code_range_| >= |optional RW pages| + |code_page_allocator_instance_|.
   // 3) |heap_reservation_| is AllocatePageSize()-aligned
...
--- a/src/profiler/tick-sample.cc
+++ b/src/profiler/tick-sample.cc
@@ -205,8 +205,8 @@ bool TickSample::GetStackSample(Isolate* v8_isolate, RegisterState* regs,
   // Check whether we interrupted setup/teardown of a stack frame in JS code.
   // Avoid this check for C++ code, as that would trigger false positives.
-  if (regs->pc && isolate->heap()->memory_allocator()->code_range_valid() &&
-      isolate->heap()->memory_allocator()->code_range_contains(
+  if (regs->pc &&
+      isolate->heap()->memory_allocator()->code_range().contains(
           reinterpret_cast<i::Address>(regs->pc)) &&
       IsNoFrameRegion(reinterpret_cast<i::Address>(regs->pc))) {
     // The frame is not setup, so it'd be hard to iterate the stack. Bailout.
...
--- a/test/unittests/BUILD.gn
+++ b/test/unittests/BUILD.gn
@@ -55,6 +55,7 @@ v8_source_set("unittests_sources") {
     "asmjs/asm-scanner-unittest.cc",
     "asmjs/asm-types-unittest.cc",
     "asmjs/switch-logic-unittest.cc",
+    "base/address-region-unittest.cc",
     "base/atomic-utils-unittest.cc",
     "base/bits-unittest.cc",
     "base/cpu-unittest.cc",
...
// Copyright 2018 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/address-region.h"

#include "testing/gtest/include/gtest/gtest.h"

namespace v8 {
namespace base {

using Address = AddressRegion::Address;

TEST(AddressRegionTest, Contains) {
  struct {
    Address start;
    size_t size;
  } test_cases[] = {{153, 771}, {0, 227}, {-447, 447}};

  for (size_t i = 0; i < arraysize(test_cases); i++) {
    Address start = test_cases[i].start;
    size_t size = test_cases[i].size;
    Address end = start + size;  // exclusive

    AddressRegion region(start, size);

    // Test single-argument contains().
    CHECK(!region.contains(start - 1041));
    CHECK(!region.contains(start - 1));
    CHECK(!region.contains(end));
    CHECK(!region.contains(end + 1));
    CHECK(!region.contains(end + 113));

    CHECK(region.contains(start));
    CHECK(region.contains(start + 1));
    CHECK(region.contains(start + size / 2));
    CHECK(region.contains(end - 1));

    // Test two-argument contains().
    CHECK(!region.contains(start - 17, 17));
    CHECK(!region.contains(start - 17, size * 2));
    CHECK(!region.contains(end, 1));
    CHECK(!region.contains(end, static_cast<size_t>(0 - end)));

    CHECK(region.contains(start, size));
    CHECK(region.contains(start, 10));
    CHECK(region.contains(start + 11, 120));
    CHECK(region.contains(end - 13, 13));
    CHECK(!region.contains(end, 0));

    // Zero-size queries.
    CHECK(!region.contains(start - 10, 0));
    CHECK(!region.contains(start - 1, 0));
    CHECK(!region.contains(end, 0));
    CHECK(!region.contains(end + 10, 0));

    CHECK(region.contains(start, 0));
    CHECK(region.contains(start + 10, 0));
    CHECK(region.contains(end - 1, 0));
  }
}

}  // namespace base
}  // namespace v8
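A subtle point in the test data (not spelled out in the CL): the {-447, 447} case places the region at the very top of the address space, so end = start + size wraps around to 0. The unsigned arithmetic in contains() keeps the checks correct even there; for instance, contains(end) computes 0 - start = 447, which is not strictly less than size() = 447, so the query is rejected as required.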
--- a/test/unittests/base/region-allocator-unittest.cc
+++ b/test/unittests/base/region-allocator-unittest.cc
@@ -322,56 +322,5 @@ TEST(RegionAllocatorTest, FindRegion) {
   }
 }

-TEST(RegionAllocatorTest, Contains) {
-  using Region = RegionAllocator::Region;
-
-  struct {
-    Address start;
-    size_t size;
-  } test_cases[] = {{153, 771}, {0, 227}, {-447, 447}};
-
-  for (size_t i = 0; i < arraysize(test_cases); i++) {
-    Address start = test_cases[i].start;
-    size_t size = test_cases[i].size;
-    Address end = start + size;  // exclusive
-
-    Region region(start, size, true);
-
-    // Test single-argument contains().
-    CHECK(!region.contains(start - 1041));
-    CHECK(!region.contains(start - 1));
-    CHECK(!region.contains(end));
-    CHECK(!region.contains(end + 1));
-    CHECK(!region.contains(end + 113));
-
-    CHECK(region.contains(start));
-    CHECK(region.contains(start + 1));
-    CHECK(region.contains(start + size / 2));
-    CHECK(region.contains(end - 1));
-
-    // Test two-argument contains().
-    CHECK(!region.contains(start - 17, 17));
-    CHECK(!region.contains(start - 17, size * 2));
-    CHECK(!region.contains(end, 1));
-    CHECK(!region.contains(end, static_cast<size_t>(0 - end)));
-
-    CHECK(region.contains(start, size));
-    CHECK(region.contains(start, 10));
-    CHECK(region.contains(start + 11, 120));
-    CHECK(region.contains(end - 13, 13));
-    CHECK(!region.contains(end, 0));
-
-    // Zero-size queries.
-    CHECK(!region.contains(start - 10, 0));
-    CHECK(!region.contains(start - 1, 0));
-    CHECK(!region.contains(end, 0));
-    CHECK(!region.contains(end + 10, 0));
-
-    CHECK(region.contains(start, 0));
-    CHECK(region.contains(start + 10, 0));
-    CHECK(region.contains(end - 1, 0));
-  }
-}
-
 }  // namespace base
 }  // namespace v8