Commit 6d86857c authored by Clemens Hammacher, committed by Commit Bot

[wasm] Remove AddressRange, use base::AddressRegion

R=ahaas@chromium.org

Bug: v8:8015
Change-Id: Ic449b76ab3957bb989bbb1fc9cc1fb4782db7acf
Reviewed-on: https://chromium-review.googlesource.com/1240119
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Clemens Hammacher <clemensh@chromium.org>
Cr-Commit-Position: refs/heads/master@{#56240}
parent 980a9602
...@@ -5,14 +5,14 @@ ...@@ -5,14 +5,14 @@
#ifndef V8_BASE_ADDRESS_REGION_H_ #ifndef V8_BASE_ADDRESS_REGION_H_
#define V8_BASE_ADDRESS_REGION_H_ #define V8_BASE_ADDRESS_REGION_H_
#include <type_traits> #include <iostream>
#include "src/base/macros.h" #include "src/base/macros.h"
namespace v8 { namespace v8 {
namespace base { namespace base {
// Helper class representing an address region of certian size. // Helper class representing an address region of certain size.
class AddressRegion { class AddressRegion {
public: public:
typedef uintptr_t Address; typedef uintptr_t Address;
...@@ -41,14 +41,28 @@ class AddressRegion { ...@@ -41,14 +41,28 @@ class AddressRegion {
return (offset < size_) && (offset <= size_ - size); return (offset < size_) && (offset <= size_ - size);
} }
bool contains(const AddressRegion& region) const { bool contains(AddressRegion region) const {
return contains(region.address_, region.size_); return contains(region.address_, region.size_);
} }
bool operator==(AddressRegion other) const {
return address_ == other.address_ && size_ == other.size_;
}
bool operator!=(AddressRegion other) const {
return address_ != other.address_ || size_ != other.size_;
}
private: private:
Address address_ = 0; Address address_ = 0;
size_t size_ = 0; size_t size_ = 0;
}; };
ASSERT_TRIVIALLY_COPYABLE(AddressRegion);
inline std::ostream& operator<<(std::ostream& out, AddressRegion region) {
return out << "[" << reinterpret_cast<void*>(region.begin()) << "+"
<< region.size() << "]";
}
} // namespace base } // namespace base
} // namespace v8 } // namespace v8
......
...@@ -44,53 +44,56 @@ struct WasmCodeUniquePtrComparator { ...@@ -44,53 +44,56 @@ struct WasmCodeUniquePtrComparator {
} // namespace } // namespace
void DisjointAllocationPool::Merge(AddressRange range) { void DisjointAllocationPool::Merge(base::AddressRegion region) {
auto dest_it = ranges_.begin(); auto dest_it = regions_.begin();
auto dest_end = ranges_.end(); auto dest_end = regions_.end();
// Skip over dest ranges strictly before {range}. // Skip over dest regions strictly before {region}.
while (dest_it != dest_end && dest_it->end < range.start) ++dest_it; while (dest_it != dest_end && dest_it->end() < region.begin()) ++dest_it;
// After last dest range: insert and done. // After last dest region: insert and done.
if (dest_it == dest_end) { if (dest_it == dest_end) {
ranges_.push_back(range); regions_.push_back(region);
return; return;
} }
// Adjacent (from below) to dest: merge and done. // Adjacent (from below) to dest: merge and done.
if (dest_it->start == range.end) { if (dest_it->begin() == region.end()) {
dest_it->start = range.start; base::AddressRegion merged_region{region.begin(),
region.size() + dest_it->size()};
DCHECK_EQ(merged_region.end(), dest_it->end());
*dest_it = merged_region;
return; return;
} }
// Before dest: insert and done. // Before dest: insert and done.
if (dest_it->start > range.end) { if (dest_it->begin() > region.end()) {
ranges_.insert(dest_it, range); regions_.insert(dest_it, region);
return; return;
} }
// Src is adjacent from above. Merge and check whether the merged range is now // Src is adjacent from above. Merge and check whether the merged region is
// adjacent to the next range. // now adjacent to the next region.
DCHECK_EQ(dest_it->end, range.start); DCHECK_EQ(dest_it->end(), region.begin());
dest_it->end = range.end; dest_it->set_size(dest_it->size() + region.size());
DCHECK_EQ(dest_it->end(), region.end());
auto next_dest = dest_it; auto next_dest = dest_it;
++next_dest; ++next_dest;
if (next_dest != dest_end && dest_it->end == next_dest->start) { if (next_dest != dest_end && dest_it->end() == next_dest->begin()) {
dest_it->end = next_dest->end; dest_it->set_size(dest_it->size() + next_dest->size());
ranges_.erase(next_dest); DCHECK_EQ(dest_it->end(), next_dest->end());
regions_.erase(next_dest);
} }
} }
AddressRange DisjointAllocationPool::Allocate(size_t size) { base::AddressRegion DisjointAllocationPool::Allocate(size_t size) {
for (auto it = ranges_.begin(), end = ranges_.end(); it != end; ++it) { for (auto it = regions_.begin(), end = regions_.end(); it != end; ++it) {
size_t range_size = it->size(); if (size > it->size()) continue;
if (size > range_size) continue; base::AddressRegion ret{it->begin(), size};
AddressRange ret{it->start, it->start + size}; if (size == it->size()) {
if (size == range_size) { regions_.erase(it);
ranges_.erase(it);
} else { } else {
it->start += size; *it = base::AddressRegion{it->begin() + size, it->size() - size};
DCHECK_LT(it->start, it->end);
} }
return ret; return ret;
} }
...@@ -325,7 +328,7 @@ NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled, ...@@ -325,7 +328,7 @@ NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled,
: enabled_features_(enabled), : enabled_features_(enabled),
module_(std::move(module)), module_(std::move(module)),
compilation_state_(NewCompilationState(isolate, env)), compilation_state_(NewCompilationState(isolate, env)),
free_code_space_({code_space.address(), code_space.end()}), free_code_space_(code_space.region()),
wasm_code_manager_(code_manager), wasm_code_manager_(code_manager),
can_request_more_memory_(can_request_more), can_request_more_memory_(can_request_more),
use_trap_handler_(env.use_trap_handler) { use_trap_handler_(env.use_trap_handler) {
...@@ -653,10 +656,11 @@ void NativeModule::InstallCode(WasmCode* code) { ...@@ -653,10 +656,11 @@ void NativeModule::InstallCode(WasmCode* code) {
} }
Address NativeModule::AllocateForCode(size_t size) { Address NativeModule::AllocateForCode(size_t size) {
DCHECK_LT(0, size);
v8::PageAllocator* page_allocator = GetPlatformPageAllocator(); v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
// This happens under a lock assumed by the caller. // This happens under a lock assumed by the caller.
size = RoundUp(size, kCodeAlignment); size = RoundUp(size, kCodeAlignment);
AddressRange mem = free_code_space_.Allocate(size); base::AddressRegion mem = free_code_space_.Allocate(size);
if (mem.is_empty()) { if (mem.is_empty()) {
if (!can_request_more_memory_) return kNullAddress; if (!can_request_more_memory_) return kNullAddress;
...@@ -668,13 +672,14 @@ Address NativeModule::AllocateForCode(size_t size) { ...@@ -668,13 +672,14 @@ Address NativeModule::AllocateForCode(size_t size) {
if (!new_mem.IsReserved()) return kNullAddress; if (!new_mem.IsReserved()) return kNullAddress;
wasm_code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this); wasm_code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this);
free_code_space_.Merge({new_mem.address(), new_mem.end()}); free_code_space_.Merge(new_mem.region());
owned_code_space_.emplace_back(std::move(new_mem)); owned_code_space_.emplace_back(std::move(new_mem));
mem = free_code_space_.Allocate(size); mem = free_code_space_.Allocate(size);
DCHECK(!mem.is_empty()); DCHECK(!mem.is_empty());
} }
Address commit_start = RoundUp(mem.start, page_allocator->AllocatePageSize()); Address commit_start =
Address commit_end = RoundUp(mem.end, page_allocator->AllocatePageSize()); RoundUp(mem.begin(), page_allocator->AllocatePageSize());
Address commit_end = RoundUp(mem.end(), page_allocator->AllocatePageSize());
// {commit_start} will be either mem.start or the start of the next page. // {commit_start} will be either mem.start or the start of the next page.
// {commit_end} will be the start of the page after the one in which // {commit_end} will be the start of the page after the one in which
// the allocation ends. // the allocation ends.
...@@ -685,7 +690,7 @@ Address NativeModule::AllocateForCode(size_t size) { ...@@ -685,7 +690,7 @@ Address NativeModule::AllocateForCode(size_t size) {
// The end needs to be committed all through the end of the page. // The end needs to be committed all through the end of the page.
if (commit_start < commit_end) { if (commit_start < commit_end) {
#if V8_OS_WIN #if V8_OS_WIN
// On Windows, we cannot commit a range that straddles different // On Windows, we cannot commit a region that straddles different
// reservations of virtual memory. Because we bump-allocate, and because, if // reservations of virtual memory. Because we bump-allocate, and because, if
// we need more memory, we append that memory at the end of the // we need more memory, we append that memory at the end of the
// owned_code_space_ list, we traverse that list in reverse order to find // owned_code_space_ list, we traverse that list in reverse order to find
...@@ -712,10 +717,10 @@ Address NativeModule::AllocateForCode(size_t size) { ...@@ -712,10 +717,10 @@ Address NativeModule::AllocateForCode(size_t size) {
committed_code_space_.fetch_add(commit_size); committed_code_space_.fetch_add(commit_size);
#endif #endif
} }
DCHECK(IsAligned(mem.start, kCodeAlignment)); DCHECK(IsAligned(mem.begin(), kCodeAlignment));
allocated_code_space_.Merge(mem); allocated_code_space_.Merge(mem);
TRACE_HEAP("Code alloc for %p: %" PRIuPTR ",+%zu\n", this, mem.start, size); TRACE_HEAP("Code alloc for %p: %" PRIuPTR ",+%zu\n", this, mem.begin(), size);
return mem.start; return mem.begin();
} }
WasmCode* NativeModule::Lookup(Address pc) const { WasmCode* NativeModule::Lookup(Address pc) const {
...@@ -976,18 +981,18 @@ bool NativeModule::SetExecutable(bool executable) { ...@@ -976,18 +981,18 @@ bool NativeModule::SetExecutable(bool executable) {
return true; return true;
} }
#endif #endif
for (auto& range : allocated_code_space_.ranges()) { for (auto& region : allocated_code_space_.regions()) {
// allocated_code_space_ is fine-grained, so we need to // allocated_code_space_ is fine-grained, so we need to
// page-align it. // page-align it.
size_t range_size = size_t region_size =
RoundUp(range.size(), page_allocator->AllocatePageSize()); RoundUp(region.size(), page_allocator->AllocatePageSize());
if (!SetPermissions(page_allocator, range.start, range_size, if (!SetPermissions(page_allocator, region.begin(), region_size,
permission)) { permission)) {
return false; return false;
} }
TRACE_HEAP("Set %p:%p to executable:%d\n", TRACE_HEAP("Set %p:%p to executable:%d\n",
reinterpret_cast<void*>(range.start), reinterpret_cast<void*>(region.begin()),
reinterpret_cast<void*>(range.end), executable); reinterpret_cast<void*>(region.end()), executable);
} }
} }
is_executable_ = executable; is_executable_ = executable;
...@@ -1035,12 +1040,12 @@ NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const { ...@@ -1035,12 +1040,12 @@ NativeModule* WasmCodeManager::LookupNativeModule(Address pc) const {
auto iter = lookup_map_.upper_bound(pc); auto iter = lookup_map_.upper_bound(pc);
if (iter == lookup_map_.begin()) return nullptr; if (iter == lookup_map_.begin()) return nullptr;
--iter; --iter;
Address range_start = iter->first; Address region_start = iter->first;
Address range_end = iter->second.first; Address region_end = iter->second.first;
NativeModule* candidate = iter->second.second; NativeModule* candidate = iter->second.second;
DCHECK_NOT_NULL(candidate); DCHECK_NOT_NULL(candidate);
return range_start <= pc && pc < range_end ? candidate : nullptr; return region_start <= pc && pc < region_end ? candidate : nullptr;
} }
WasmCode* WasmCodeManager::LookupCode(Address pc) const { WasmCode* WasmCodeManager::LookupCode(Address pc) const {
......
...@@ -33,47 +33,34 @@ class WasmCodeManager; ...@@ -33,47 +33,34 @@ class WasmCodeManager;
class WasmMemoryTracker; class WasmMemoryTracker;
struct WasmModule; struct WasmModule;
struct AddressRange { // Sorted, disjoint and non-overlapping memory regions. A region is of the
Address start;
Address end;
AddressRange(Address s, Address e) : start(s), end(e) {
DCHECK_LE(start, end);
DCHECK_IMPLIES(start == kNullAddress, end == kNullAddress);
}
AddressRange() : AddressRange(kNullAddress, kNullAddress) {}
size_t size() const { return static_cast<size_t>(end - start); }
bool is_empty() const { return start == end; }
operator bool() const { return start == kNullAddress; }
};
// Sorted, disjoint and non-overlapping memory ranges. A range is of the
// form [start, end). So there's no [start, end), [end, other_end), // form [start, end). So there's no [start, end), [end, other_end),
// because that should have been reduced to [start, other_end). // because that should have been reduced to [start, other_end).
class V8_EXPORT_PRIVATE DisjointAllocationPool final { class V8_EXPORT_PRIVATE DisjointAllocationPool final {
public: public:
DisjointAllocationPool() = default; DisjointAllocationPool() = default;
explicit DisjointAllocationPool(AddressRange range) : ranges_({range}) {} explicit DisjointAllocationPool(base::AddressRegion region)
: regions_({region}) {}
DisjointAllocationPool(DisjointAllocationPool&& other) = default; DisjointAllocationPool(DisjointAllocationPool&& other) = default;
DisjointAllocationPool& operator=(DisjointAllocationPool&& other) = default; DisjointAllocationPool& operator=(DisjointAllocationPool&& other) = default;
// Merge the parameter range into this object while preserving ordering of the // Merge the parameter region into this object while preserving ordering of
// ranges. The assumption is that the passed parameter is not intersecting // the regions. The assumption is that the passed parameter is not
// this object - for example, it was obtained from a previous Allocate. // intersecting this object - for example, it was obtained from a previous
void Merge(AddressRange); // Allocate.
void Merge(base::AddressRegion);
// Allocate a contiguous range of size {size}. Return an empty pool on // Allocate a contiguous region of size {size}. Return an empty pool on
// failure. // failure.
AddressRange Allocate(size_t size); base::AddressRegion Allocate(size_t size);
bool IsEmpty() const { return ranges_.empty(); } bool IsEmpty() const { return regions_.empty(); }
const std::list<AddressRange>& ranges() const { return ranges_; } const std::list<base::AddressRegion>& regions() const { return regions_; }
private: private:
std::list<AddressRange> ranges_; std::list<base::AddressRegion> regions_;
DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool) DISALLOW_COPY_AND_ASSIGN(DisjointAllocationPool)
}; };
......
...@@ -17,35 +17,34 @@ namespace wasm_heap_unittest { ...@@ -17,35 +17,34 @@ namespace wasm_heap_unittest {
class DisjointAllocationPoolTest : public ::testing::Test { class DisjointAllocationPoolTest : public ::testing::Test {
public: public:
Address A(size_t n) { return static_cast<Address>(n); }
void CheckPool(const DisjointAllocationPool& mem, void CheckPool(const DisjointAllocationPool& mem,
std::initializer_list<AddressRange> expected_ranges); std::initializer_list<base::AddressRegion> expected_regions);
void CheckRange(AddressRange range1, AddressRange range2); void CheckRange(base::AddressRegion region1, base::AddressRegion region2);
DisjointAllocationPool Make(std::initializer_list<AddressRange> ranges); DisjointAllocationPool Make(
std::initializer_list<base::AddressRegion> regions);
}; };
void DisjointAllocationPoolTest::CheckPool( void DisjointAllocationPoolTest::CheckPool(
const DisjointAllocationPool& mem, const DisjointAllocationPool& mem,
std::initializer_list<AddressRange> expected_ranges) { std::initializer_list<base::AddressRegion> expected_regions) {
const auto& ranges = mem.ranges(); const auto& regions = mem.regions();
CHECK_EQ(ranges.size(), expected_ranges.size()); CHECK_EQ(regions.size(), expected_regions.size());
auto iter = expected_ranges.begin(); auto iter = expected_regions.begin();
for (auto it = ranges.begin(), e = ranges.end(); it != e; ++it, ++iter) { for (auto it = regions.begin(), e = regions.end(); it != e; ++it, ++iter) {
CHECK_EQ(*it, *iter); CHECK_EQ(*it, *iter);
} }
} }
void DisjointAllocationPoolTest::CheckRange(AddressRange range1, void DisjointAllocationPoolTest::CheckRange(base::AddressRegion region1,
AddressRange range2) { base::AddressRegion region2) {
CHECK_EQ(range1.start, range2.start); CHECK_EQ(region1, region2);
CHECK_EQ(range1.end, range2.end);
} }
DisjointAllocationPool DisjointAllocationPoolTest::Make( DisjointAllocationPool DisjointAllocationPoolTest::Make(
std::initializer_list<AddressRange> ranges) { std::initializer_list<base::AddressRegion> regions) {
DisjointAllocationPool ret; DisjointAllocationPool ret;
for (auto& range : ranges) { for (auto& region : regions) {
ret.Merge(range); ret.Merge(region);
} }
return ret; return ret;
} }
...@@ -54,89 +53,89 @@ TEST_F(DisjointAllocationPoolTest, ConstructEmpty) { ...@@ -54,89 +53,89 @@ TEST_F(DisjointAllocationPoolTest, ConstructEmpty) {
DisjointAllocationPool a; DisjointAllocationPool a;
CHECK(a.IsEmpty()); CHECK(a.IsEmpty());
CheckPool(a, {}); CheckPool(a, {});
a.Merge({1, 5}); a.Merge({1, 4});
CheckPool(a, {{1, 5}}); CheckPool(a, {{1, 4}});
} }
TEST_F(DisjointAllocationPoolTest, ConstructWithRange) { TEST_F(DisjointAllocationPoolTest, ConstructWithRange) {
DisjointAllocationPool a({1, 5}); DisjointAllocationPool a({1, 4});
CHECK(!a.IsEmpty()); CHECK(!a.IsEmpty());
CheckPool(a, {{1, 5}}); CheckPool(a, {{1, 4}});
} }
TEST_F(DisjointAllocationPoolTest, SimpleExtract) { TEST_F(DisjointAllocationPoolTest, SimpleExtract) {
DisjointAllocationPool a = Make({{1, 5}}); DisjointAllocationPool a = Make({{1, 4}});
AddressRange b = a.Allocate(2); base::AddressRegion b = a.Allocate(2);
CheckPool(a, {{3, 5}}); CheckPool(a, {{3, 2}});
CheckRange(b, {1, 3}); CheckRange(b, {1, 2});
a.Merge(b); a.Merge(b);
CheckPool(a, {{1, 5}}); CheckPool(a, {{1, 4}});
CHECK_EQ(a.ranges().size(), 1); CHECK_EQ(a.regions().size(), 1);
CHECK_EQ(a.ranges().front().start, A(1)); CHECK_EQ(a.regions().front().begin(), 1);
CHECK_EQ(a.ranges().front().end, A(5)); CHECK_EQ(a.regions().front().end(), 5);
} }
TEST_F(DisjointAllocationPoolTest, ExtractAll) { TEST_F(DisjointAllocationPoolTest, ExtractAll) {
DisjointAllocationPool a({A(1), A(5)}); DisjointAllocationPool a({1, 4});
AddressRange b = a.Allocate(4); base::AddressRegion b = a.Allocate(4);
CheckRange(b, {1, 5}); CheckRange(b, {1, 4});
CHECK(a.IsEmpty()); CHECK(a.IsEmpty());
a.Merge(b); a.Merge(b);
CheckPool(a, {{1, 5}}); CheckPool(a, {{1, 4}});
} }
TEST_F(DisjointAllocationPoolTest, FailToExtract) { TEST_F(DisjointAllocationPoolTest, FailToExtract) {
DisjointAllocationPool a = Make({{1, 5}}); DisjointAllocationPool a = Make({{1, 4}});
AddressRange b = a.Allocate(5); base::AddressRegion b = a.Allocate(5);
CheckPool(a, {{1, 5}}); CheckPool(a, {{1, 4}});
CHECK(b.is_empty()); CHECK(b.is_empty());
} }
TEST_F(DisjointAllocationPoolTest, FailToExtractExact) { TEST_F(DisjointAllocationPoolTest, FailToExtractExact) {
DisjointAllocationPool a = Make({{1, 5}, {10, 14}}); DisjointAllocationPool a = Make({{1, 4}, {10, 4}});
AddressRange b = a.Allocate(5); base::AddressRegion b = a.Allocate(5);
CheckPool(a, {{1, 5}, {10, 14}}); CheckPool(a, {{1, 4}, {10, 4}});
CHECK(b.is_empty()); CHECK(b.is_empty());
} }
TEST_F(DisjointAllocationPoolTest, ExtractExact) { TEST_F(DisjointAllocationPoolTest, ExtractExact) {
DisjointAllocationPool a = Make({{1, 5}, {10, 15}}); DisjointAllocationPool a = Make({{1, 4}, {10, 5}});
AddressRange b = a.Allocate(5); base::AddressRegion b = a.Allocate(5);
CheckPool(a, {{1, 5}}); CheckPool(a, {{1, 4}});
CheckRange(b, {10, 15}); CheckRange(b, {10, 5});
} }
TEST_F(DisjointAllocationPoolTest, Merging) { TEST_F(DisjointAllocationPoolTest, Merging) {
DisjointAllocationPool a = Make({{10, 15}, {20, 25}}); DisjointAllocationPool a = Make({{10, 5}, {20, 5}});
a.Merge({15, 20}); a.Merge({15, 5});
CheckPool(a, {{10, 25}}); CheckPool(a, {{10, 15}});
} }
TEST_F(DisjointAllocationPoolTest, MergingMore) { TEST_F(DisjointAllocationPoolTest, MergingMore) {
DisjointAllocationPool a = Make({{10, 15}, {20, 25}, {30, 35}}); DisjointAllocationPool a = Make({{10, 5}, {20, 5}, {30, 5}});
a.Merge({15, 20}); a.Merge({15, 5});
a.Merge({25, 30}); a.Merge({25, 5});
CheckPool(a, {{10, 35}}); CheckPool(a, {{10, 25}});
} }
TEST_F(DisjointAllocationPoolTest, MergingSkip) { TEST_F(DisjointAllocationPoolTest, MergingSkip) {
DisjointAllocationPool a = Make({{10, 15}, {20, 25}, {30, 35}}); DisjointAllocationPool a = Make({{10, 5}, {20, 5}, {30, 5}});
a.Merge({25, 30}); a.Merge({25, 5});
CheckPool(a, {{10, 15}, {20, 35}}); CheckPool(a, {{10, 5}, {20, 15}});
} }
TEST_F(DisjointAllocationPoolTest, MergingSkipLargerSrc) { TEST_F(DisjointAllocationPoolTest, MergingSkipLargerSrc) {
DisjointAllocationPool a = Make({{10, 15}, {20, 25}, {30, 35}}); DisjointAllocationPool a = Make({{10, 5}, {20, 5}, {30, 5}});
a.Merge({25, 30}); a.Merge({25, 5});
a.Merge({35, 40}); a.Merge({35, 5});
CheckPool(a, {{10, 15}, {20, 40}}); CheckPool(a, {{10, 5}, {20, 20}});
} }
TEST_F(DisjointAllocationPoolTest, MergingSkipLargerSrcWithGap) { TEST_F(DisjointAllocationPoolTest, MergingSkipLargerSrcWithGap) {
DisjointAllocationPool a = Make({{10, 15}, {20, 25}, {30, 35}}); DisjointAllocationPool a = Make({{10, 5}, {20, 5}, {30, 5}});
a.Merge({25, 30}); a.Merge({25, 5});
a.Merge({36, 40}); a.Merge({36, 4});
CheckPool(a, {{10, 15}, {20, 35}, {36, 40}}); CheckPool(a, {{10, 5}, {20, 15}, {36, 4}});
} }
enum ModuleStyle : int { Fixed = 0, Growable = 1 }; enum ModuleStyle : int { Fixed = 0, Growable = 1 };
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment