Commit fbf1bc66 authored by mlippautz, committed by Commit bot

Revert of [heap] Improve size profiling for ArrayBuffer tracking (patchset #6 id:140001 of https://codereview.chromium.org/2210263002/ )

Reason for revert:
Tanks octane

Original issue's description:
> [heap] Improve size profiling for ArrayBuffer tracking
>
> Eagerly account for retained sizes during ArrayBuffer tracking. Following up on this,
> we can now do Scavenges if the amount of memory retained from new space is too large.
>
> BUG=chromium:621829
> R=jochen@chromium.org,hpayer@chromium.org
>
> Committed: https://crrev.com/28e13bd6a75c9467dae43043e7b741a1387d5252
> Cr-Commit-Position: refs/heads/master@{#38731}

TBR=jochen@chromium.org,hpayer@chromium.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:621829

Review-Url: https://codereview.chromium.org/2261513003
Cr-Commit-Position: refs/heads/master@{#38739}
parent ed08838e
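
Context for the revert: the reverted CL made ArrayBufferTracker a per-heap object that kept two byte counters, retained_from_new_space_ and retained_from_old_space_, and compared each space's live object size against its retained external memory to decide when a scavenge would pay off (see the deleted Heap::ShouldDoScavengeForReducingExternalMemory below). The following is a minimal sketch of that accounting scheme, reconstructed from the deleted code in the diff; it is an illustration only, with std::atomic standing in for V8's base::AtomicNumber.

#include <atomic>
#include <cstddef>

// Sketch of the reverted per-space accounting (simplified, not verbatim).
class RetainedCounters {
 public:
  // RegisterNew: attribute a buffer's length to the space its page lives in.
  void OnRegister(bool page_in_new_space, size_t length) {
    (page_in_new_space ? new_space_ : old_space_).fetch_add(length);
  }

  // Unregister: drop the buffer's length from its space.
  void OnUnregister(bool page_in_new_space, size_t length) {
    (page_in_new_space ? new_space_ : old_space_).fetch_sub(length);
  }

  // ProcessBuffers: freed bytes leave the page's space; promoted bytes move
  // from new space to old space.
  void OnProcess(bool page_in_new_space, size_t freed, size_t promoted) {
    if (page_in_new_space) {
      new_space_.fetch_sub(freed + promoted);
    } else {
      old_space_.fetch_sub(freed);
    }
    old_space_.fetch_add(promoted);
  }

  size_t retained_from_new_space() const { return new_space_.load(); }
  size_t retained_from_old_space() const { return old_space_.load(); }

 private:
  std::atomic<size_t> new_space_{0};
  std::atomic<size_t> old_space_{0};
};

The revert returns to the counter-free static tracker: freed backing-store bytes are batched in Heap::external_memory_concurrently_freed_ and folded back into external_memory_ at the next GC via account_external_memory_concurrently_freed().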
@@ -7605,7 +7605,7 @@ class Internals {
       kExternalMemoryOffset + kApiInt64Size;
   static const int kIsolateRootsOffset = kExternalMemoryLimitOffset +
                                          kApiInt64Size + kApiInt64Size +
-                                         kApiPointerSize;
+                                         kApiPointerSize + kApiPointerSize;
   static const int kUndefinedValueRootIndex = 4;
   static const int kTheHoleValueRootIndex = 5;
   static const int kNullValueRootIndex = 6;
@@ -10,7 +10,7 @@
 namespace v8 {
 namespace internal {
 
-void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
+void ArrayBufferTracker::RegisterNew(Heap* heap, JSArrayBuffer* buffer) {
   void* data = buffer->backing_store();
   if (!data) return;
@@ -26,18 +26,13 @@ void ArrayBufferTracker::RegisterNew(JSArrayBuffer* buffer) {
     DCHECK_NOT_NULL(tracker);
     tracker->Add(buffer, length);
   }
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Increment(length);
-  } else {
-    retained_from_old_space_.Increment(length);
-  }
   // We may go over the limit of externally allocated memory here. We call the
   // api function to trigger a GC in this case.
-  reinterpret_cast<v8::Isolate*>(heap_->isolate())
+  reinterpret_cast<v8::Isolate*>(heap->isolate())
       ->AdjustAmountOfExternalAllocatedMemory(length);
 }
 
-void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
+void ArrayBufferTracker::Unregister(Heap* heap, JSArrayBuffer* buffer) {
   void* data = buffer->backing_store();
   if (!data) return;
@@ -49,12 +44,7 @@ void ArrayBufferTracker::Unregister(JSArrayBuffer* buffer) {
     DCHECK_NOT_NULL(tracker);
     length = tracker->Remove(buffer);
   }
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(length);
-  } else {
-    retained_from_old_space_.Decrement(length);
-  }
-  heap_->update_external_memory(-static_cast<int64_t>(length));
+  heap->update_external_memory(-static_cast<intptr_t>(length));
 }
 
 void LocalArrayBufferTracker::Add(Key key, const Value& value) {
@@ -14,7 +14,7 @@ LocalArrayBufferTracker::~LocalArrayBufferTracker() {
 }
 
 template <LocalArrayBufferTracker::FreeMode free_mode>
-LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
+void LocalArrayBufferTracker::Free() {
   size_t freed_memory = 0;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
@@ -30,71 +30,60 @@ LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Free() {
       ++it;
     }
   }
-  return ProcessResult(freed_memory, 0);
+  if (freed_memory > 0) {
+    heap_->update_external_memory_concurrently_freed(
+        static_cast<intptr_t>(freed_memory));
+  }
 }
 
 template <typename Callback>
-LocalArrayBufferTracker::ProcessResult LocalArrayBufferTracker::Process(
-    Callback callback) {
+void LocalArrayBufferTracker::Process(Callback callback) {
   JSArrayBuffer* new_buffer = nullptr;
   size_t freed_memory = 0;
-  size_t promoted_memory = 0;
-  size_t len = 0;
-  Page* target_page = nullptr;
-  LocalArrayBufferTracker* tracker = nullptr;
   for (TrackingData::iterator it = array_buffers_.begin();
        it != array_buffers_.end();) {
-    switch (callback(it->first, &new_buffer)) {
-      case kKeepEntry:
+    const CallbackResult result = callback(it->first, &new_buffer);
+    if (result == kKeepEntry) {
       ++it;
-        break;
-      case kUpdateEntry:
+    } else if (result == kUpdateEntry) {
       DCHECK_NOT_NULL(new_buffer);
-        target_page = Page::FromAddress(new_buffer->address());
+      Page* target_page = Page::FromAddress(new_buffer->address());
       // We need to lock the target page because we cannot guarantee
      // exclusive access to new space pages.
       if (target_page->InNewSpace()) target_page->mutex()->Lock();
-        tracker = target_page->local_tracker();
+      LocalArrayBufferTracker* tracker = target_page->local_tracker();
       if (tracker == nullptr) {
         target_page->AllocateLocalTracker();
         tracker = target_page->local_tracker();
       }
       DCHECK_NOT_NULL(tracker);
-        len = it->second;
-        tracker->Add(new_buffer, len);
-        if (target_page->InNewSpace()) {
-          target_page->mutex()->Unlock();
-        } else {
-          promoted_memory += len;
-        }
+      tracker->Add(new_buffer, it->second);
+      if (target_page->InNewSpace()) target_page->mutex()->Unlock();
       it = array_buffers_.erase(it);
-        break;
-      case kRemoveEntry:
-        len = it->second;
+    } else if (result == kRemoveEntry) {
+      const size_t len = it->second;
       heap_->isolate()->array_buffer_allocator()->Free(
           it->first->backing_store(), len);
       freed_memory += len;
       it = array_buffers_.erase(it);
-        break;
+    } else {
+      UNREACHABLE();
     }
   }
-  return ProcessResult(freed_memory, promoted_memory);
-}
-
-void ArrayBufferTracker::AccountForConcurrentlyFreedMemory() {
-  heap_->update_external_memory(
-      static_cast<int64_t>(concurrently_freed_.Value()));
-  concurrently_freed_.SetValue(0);
+  if (freed_memory > 0) {
+    heap_->update_external_memory_concurrently_freed(
+        static_cast<intptr_t>(freed_memory));
+  }
 }
 
-void ArrayBufferTracker::FreeDeadInNewSpace() {
-  DCHECK_EQ(heap_->gc_state(), Heap::HeapState::SCAVENGE);
-  for (Page* page : NewSpacePageRange(heap_->new_space()->FromSpaceStart(),
-                                      heap_->new_space()->FromSpaceEnd())) {
+void ArrayBufferTracker::FreeDeadInNewSpace(Heap* heap) {
+  DCHECK_EQ(heap->gc_state(), Heap::HeapState::SCAVENGE);
+  for (Page* page : NewSpacePageRange(heap->new_space()->FromSpaceStart(),
+                                      heap->new_space()->FromSpaceEnd())) {
     bool empty = ProcessBuffers(page, kUpdateForwardedRemoveOthers);
     CHECK(empty);
   }
-  AccountForConcurrentlyFreedMemory();
+  heap->account_external_memory_concurrently_freed();
 }
 
 void ArrayBufferTracker::FreeDead(Page* page) {
@@ -102,13 +91,7 @@ void ArrayBufferTracker::FreeDead(Page* page) {
   LocalArrayBufferTracker* tracker = page->local_tracker();
   if (tracker == nullptr) return;
   DCHECK(!page->SweepingDone());
-  LocalArrayBufferTracker::ProcessResult result =
-      tracker->Free<LocalArrayBufferTracker::kFreeDead>();
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(result.freed);
-  } else {
-    retained_from_old_space_.Decrement(result.freed);
-  }
+  tracker->Free<LocalArrayBufferTracker::kFreeDead>();
   if (tracker->IsEmpty()) {
     page->ReleaseLocalTracker();
   }
@@ -117,14 +100,7 @@ void ArrayBufferTracker::FreeDead(Page* page) {
 void ArrayBufferTracker::FreeAll(Page* page) {
   LocalArrayBufferTracker* tracker = page->local_tracker();
   if (tracker == nullptr) return;
-  LocalArrayBufferTracker::ProcessResult result =
-      tracker->Free<LocalArrayBufferTracker::kFreeAll>();
-  concurrently_freed_.Increment(result.freed);
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(result.freed);
-  } else {
-    retained_from_old_space_.Decrement(result.freed);
-  }
+  tracker->Free<LocalArrayBufferTracker::kFreeAll>();
   if (tracker->IsEmpty()) {
     page->ReleaseLocalTracker();
   }
@@ -135,7 +111,7 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
   if (tracker == nullptr) return true;
 
   DCHECK(page->SweepingDone());
-  LocalArrayBufferTracker::ProcessResult result = tracker->Process(
+  tracker->Process(
       [mode](JSArrayBuffer* old_buffer, JSArrayBuffer** new_buffer) {
         MapWord map_word = old_buffer->map_word();
         if (map_word.IsForwardingAddress()) {
@@ -146,13 +122,6 @@ bool ArrayBufferTracker::ProcessBuffers(Page* page, ProcessingMode mode) {
                    ? LocalArrayBufferTracker::kKeepEntry
                    : LocalArrayBufferTracker::kRemoveEntry;
       });
-  concurrently_freed_.Increment(result.freed);
-  if (page->InNewSpace()) {
-    retained_from_new_space_.Decrement(result.freed + result.promoted);
-  } else {
-    retained_from_old_space_.Decrement(result.freed);
-  }
-  retained_from_old_space_.Increment(result.promoted);
   return tracker->IsEmpty();
 }
@@ -8,7 +8,6 @@
 #include <unordered_map>
 
 #include "src/allocation.h"
-#include "src/base/atomic-utils.h"
 #include "src/base/platform/mutex.h"
 #include "src/globals.h"
@@ -19,61 +18,40 @@ class Heap;
 class JSArrayBuffer;
 class Page;
 
-class ArrayBufferTracker {
+class ArrayBufferTracker : public AllStatic {
  public:
   enum ProcessingMode {
     kUpdateForwardedRemoveOthers,
     kUpdateForwardedKeepOthers,
   };
 
-  // Returns whether a buffer is currently tracked.
-  static bool IsTracked(JSArrayBuffer* buffer);
-
-  explicit ArrayBufferTracker(Heap* heap)
-      : heap_(heap),
-        concurrently_freed_(0),
-        retained_from_new_space_(0),
-        retained_from_old_space_(0) {}
-
   // The following methods are used to track raw C++ pointers to externally
   // allocated memory used as backing store in live array buffers.
 
   // Register/unregister a new JSArrayBuffer |buffer| for tracking. Guards all
   // access to the tracker by taking the page lock for the corresponding page.
-  inline void RegisterNew(JSArrayBuffer* buffer);
-  inline void Unregister(JSArrayBuffer* buffer);
+  inline static void RegisterNew(Heap* heap, JSArrayBuffer* buffer);
+  inline static void Unregister(Heap* heap, JSArrayBuffer* buffer);
 
   // Frees all backing store pointers for dead JSArrayBuffers in new space.
   // Does not take any locks and can only be called during Scavenge.
-  void FreeDeadInNewSpace();
+  static void FreeDeadInNewSpace(Heap* heap);
 
   // Frees all backing store pointers for dead JSArrayBuffer on a given page.
   // Requires marking information to be present. Requires the page lock to be
   // taken by the caller.
-  void FreeDead(Page* page);
+  static void FreeDead(Page* page);
 
   // Frees all remaining, live or dead, array buffers on a page. Only useful
   // during tear down.
-  void FreeAll(Page* page);
+  static void FreeAll(Page* page);
 
   // Processes all array buffers on a given page. |mode| specifies the action
   // to perform on the buffers. Returns whether the tracker is empty or not.
-  bool ProcessBuffers(Page* page, ProcessingMode mode);
+  static bool ProcessBuffers(Page* page, ProcessingMode mode);
 
-  void AccountForConcurrentlyFreedMemory();
-
-  size_t retained_from_new_space() { return retained_from_new_space_.Value(); }
-  size_t retained_from_old_space() { return retained_from_old_space_.Value(); }
-
- private:
-  Heap* heap_;
-  base::AtomicNumber<size_t> concurrently_freed_;
-  // Number of bytes retained from new space.
-  base::AtomicNumber<size_t> retained_from_new_space_;
-  // Number of bytes retained from old space.
-  base::AtomicNumber<size_t> retained_from_old_space_;
+  // Returns whether a buffer is currently tracked.
+  static bool IsTracked(JSArrayBuffer* buffer);
 };
 
 // LocalArrayBufferTracker tracks internalized array buffers.
@@ -87,32 +65,23 @@ class LocalArrayBufferTracker {
   enum CallbackResult { kKeepEntry, kUpdateEntry, kRemoveEntry };
   enum FreeMode { kFreeDead, kFreeAll };
 
-  struct ProcessResult {
-    ProcessResult(size_t freed, size_t promoted)
-        : freed(freed), promoted(promoted) {}
-    size_t freed;
-    size_t promoted;
-  };
-
   explicit LocalArrayBufferTracker(Heap* heap) : heap_(heap) {}
   ~LocalArrayBufferTracker();
 
   inline void Add(Key key, const Value& value);
   inline Value Remove(Key key);
 
-  // Frees up array buffers determined by |free_mode|. Returns statistics in
-  // ProcessResult.
+  // Frees up array buffers determined by |free_mode|.
   template <FreeMode free_mode>
-  ProcessResult Free();
+  void Free();
 
   // Processes buffers one by one. The CallbackResult of the callback decides
-  // what action to take on the buffer. Returns statistics in ProcessResult.
+  // what action to take on the buffer.
   //
   // Callback should be of type:
   //   CallbackResult fn(JSArrayBuffer* buffer, JSArrayBuffer** new_buffer);
   template <typename Callback>
-  ProcessResult Process(Callback callback);
+  void Process(Callback callback);
 
   bool IsEmpty() { return array_buffers_.empty(); }
@@ -162,8 +162,7 @@ Heap::Heap()
       deserialization_complete_(false),
       strong_roots_list_(NULL),
       heap_iterator_depth_(0),
-      force_oom_(false),
-      array_buffer_tracker_(nullptr) {
+      force_oom_(false) {
   // Allow build-time customization of the max semispace size. Building
   // V8 with snapshots and a non-default max semispace size is much
   // easier if you can define it as part of the build environment.
@@ -313,21 +312,6 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   return SCAVENGER;
 }
 
-size_t Heap::external_memory_retained_from_new_space() {
-  // This is just an approximation.
-  return array_buffer_tracker()->retained_from_new_space();
-}
-
-bool Heap::ShouldDoScavengeForReducingExternalMemory() {
-  size_t retained_new_space = external_memory_retained_from_new_space();
-  size_t retained_old_space = external_memory() - retained_new_space;
-  float new_space_ratio =
-      static_cast<float>(new_space_.SizeOfObjects()) / retained_new_space;
-  float old_space_ratio =
-      static_cast<float>(old_space_->SizeOfObjects()) / retained_old_space;
-  // TODO(mlippautz): Add some lower bound.
-  return new_space_ratio > old_space_ratio;
-}
-
 // TODO(1238405): Combine the infrastructure for --heap-stats and
 // --log-gc to avoid the complicated preprocessor and flag testing.
@@ -1754,7 +1738,7 @@ void Heap::Scavenge() {
   // Set age mark.
   new_space_.set_age_mark(new_space_.top());
 
-  array_buffer_tracker()->FreeDeadInNewSpace();
+  ArrayBufferTracker::FreeDeadInNewSpace(this);
 
   // Update how much has survived scavenge.
   IncrementYoungSurvivorsCounter(static_cast<int>(
@@ -2040,12 +2024,12 @@ HeapObject* Heap::DoubleAlignForDeserialization(HeapObject* object, int size) {
 void Heap::RegisterNewArrayBuffer(JSArrayBuffer* buffer) {
-  array_buffer_tracker()->RegisterNew(buffer);
+  ArrayBufferTracker::RegisterNew(this, buffer);
 }
 
 void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
-  array_buffer_tracker()->Unregister(buffer);
+  ArrayBufferTracker::Unregister(this, buffer);
 }
@@ -5400,8 +5384,6 @@ bool Heap::SetUp() {
       *this, ScavengeJob::kBytesAllocatedBeforeNextIdleTask);
   new_space()->AddAllocationObserver(idle_scavenge_observer_);
 
-  array_buffer_tracker_ = new ArrayBufferTracker(this);
-
   return true;
 }
@@ -5610,9 +5592,6 @@ void Heap::TearDown() {
   delete store_buffer_;
   store_buffer_ = nullptr;
 
-  delete array_buffer_tracker_;
-  array_buffer_tracker_ = nullptr;
-
   delete memory_allocator_;
   memory_allocator_ = nullptr;
 }
@@ -850,9 +850,14 @@ class Heap {
   int64_t external_memory() { return external_memory_; }
   void update_external_memory(int64_t delta) { external_memory_ += delta; }
 
-  size_t external_memory_retained_from_new_space();
+  void update_external_memory_concurrently_freed(intptr_t freed) {
+    external_memory_concurrently_freed_.Increment(freed);
+  }
 
-  bool ShouldDoScavengeForReducingExternalMemory();
+  void account_external_memory_concurrently_freed() {
+    external_memory_ -= external_memory_concurrently_freed_.Value();
+    external_memory_concurrently_freed_.SetValue(0);
+  }
 
   void DeoptMarkedAllocationSites();
@@ -1407,8 +1412,6 @@ class Heap {
   void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
   void UnregisterArrayBuffer(JSArrayBuffer* buffer);
 
-  ArrayBufferTracker* array_buffer_tracker() { return array_buffer_tracker_; }
-
   // ===========================================================================
   // Allocation site tracking. =================================================
   // ===========================================================================
@@ -2049,6 +2052,9 @@ class Heap {
   // Caches the amount of external memory registered at the last MC.
   int64_t external_memory_at_last_mark_compact_;
 
+  // The amount of memory that has been freed concurrently.
+  base::AtomicNumber<intptr_t> external_memory_concurrently_freed_;
+
   // This can be calculated directly from a pointer to the heap; however, it is
   // more expedient to get at the isolate directly from within Heap methods.
   Isolate* isolate_;
@@ -2290,9 +2296,6 @@ class Heap {
   // Used for testing purposes.
   bool force_oom_;
 
-  // Tracker for ArrayBuffers pointing to external memory.
-  ArrayBufferTracker* array_buffer_tracker_;
-
   // Classes in "heap" can be friends.
   friend class AlwaysAllocateScope;
   friend class GCCallbacksScope;
@@ -833,7 +833,7 @@ void MarkCompactCollector::Prepare() {
        space = spaces.next()) {
     space->PrepareForMarkCompact();
   }
-  heap()->array_buffer_tracker()->AccountForConcurrentlyFreedMemory();
+  heap()->account_external_memory_concurrently_freed();
 
 #ifdef VERIFY_HEAP
   if (!was_marked_incrementally_ && FLAG_verify_heap) {
@@ -3129,8 +3129,7 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
     case kObjectsNewToOld:
       success = collector_->VisitLiveObjects(page, &new_space_visitor_,
                                              kClearMarkbits);
-      heap->array_buffer_tracker()->ProcessBuffers(
+      ArrayBufferTracker::ProcessBuffers(
           page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
       DCHECK(success);
       break;
@@ -3157,14 +3156,14 @@ bool MarkCompactCollector::Evacuator::EvacuatePage(Page* page) {
         EvacuateRecordOnlyVisitor record_visitor(collector_->heap());
         success =
             collector_->VisitLiveObjects(page, &record_visitor, kKeepMarking);
-        heap->array_buffer_tracker()->ProcessBuffers(
+        ArrayBufferTracker::ProcessBuffers(
            page, ArrayBufferTracker::kUpdateForwardedKeepOthers);
         DCHECK(success);
         // We need to return failure here to indicate that we want this page
         // added to the sweeper.
         success = false;
       } else {
-        heap->array_buffer_tracker()->ProcessBuffers(
+        ArrayBufferTracker::ProcessBuffers(
            page, ArrayBufferTracker::kUpdateForwardedRemoveOthers);
       }
       break;
@@ -3368,7 +3367,7 @@ int MarkCompactCollector::Sweeper::RawSweep(
   // Before we sweep objects on the page, we free dead array buffers which
   // requires valid mark bits.
-  p->heap()->array_buffer_tracker()->FreeDead(p);
+  ArrayBufferTracker::FreeDead(p);
 
   // We also release the black area markers here.
   p->ReleaseBlackAreaEndMarkerMap();
@@ -3939,7 +3938,7 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
         PrintIsolate(isolate(), "sweeping: released page: %p",
                      static_cast<void*>(p));
       }
-      heap()->array_buffer_tracker()->FreeAll(p);
+      ArrayBufferTracker::FreeAll(p);
       space->ReleasePage(p);
       continue;
     }
@@ -1127,7 +1127,7 @@ bool PagedSpace::HasBeenSetUp() { return true; }
 void PagedSpace::TearDown() {
   for (auto it = begin(); it != end();) {
     Page* page = *(it++);  // Will be erased.
-    heap()->array_buffer_tracker()->FreeAll(page);
+    ArrayBufferTracker::FreeAll(page);
     heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page);
   }
   anchor_.set_next_page(&anchor_);
@@ -1627,8 +1627,6 @@ void NewSpace::UpdateInlineAllocationLimit(int size_in_bytes) {
 bool NewSpace::AddFreshPage() {
-  if (heap()->ShouldDoScavengeForReducingExternalMemory()) return false;
-
   Address top = allocation_info_.top();
   DCHECK(!Page::IsAtObjectStart(top));
   if (!to_space_.AdvancePage()) {
@@ -1821,7 +1819,7 @@ void SemiSpace::TearDown() {
   // Properly uncommit memory to keep the allocator counters in sync.
   if (is_committed()) {
     for (Page* p : *this) {
-      heap()->array_buffer_tracker()->FreeAll(p);
+      ArrayBufferTracker::FreeAll(p);
     }
     Uncommit();
   }
@@ -314,65 +314,5 @@ UNINITIALIZED_TEST(ArrayBuffer_SemiSpaceCopyMultipleTasks) {
   }
 }
 
-TEST(ArrayBuffer_RetainedCounterPromotion) {
-  // The test checks that retained counters on ArrayBufferTracker are consistent
-  // with where the buffers are on the heap.
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
-  {
-    v8::HandleScope handle_scope(isolate);
-    Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-    Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-    CHECK(heap->InNewSpace(*buf));
-    CHECK_EQ(tracker->retained_from_new_space(), 100u);
-    CHECK_EQ(tracker->retained_from_old_space(), 0u);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    CHECK(heap->InNewSpace(*buf));
-    CHECK_EQ(tracker->retained_from_new_space(), 100u);
-    CHECK_EQ(tracker->retained_from_old_space(), 0u);
-    heap::GcAndSweep(heap, OLD_SPACE);
-    CHECK(heap->InOldSpace(*buf));
-    CHECK_EQ(tracker->retained_from_new_space(), 0u);
-    CHECK_EQ(tracker->retained_from_old_space(), 100u);
-  }
-}
-
-TEST(ArrayBuffer_RetainedCounterNewSpace) {
-  // The test checks that retained counters on ArrayBufferTracker are consistent
-  // with where the buffers are on the heap.
-  CcTest::InitializeVM();
-  LocalContext env;
-  v8::Isolate* isolate = env->GetIsolate();
-  Heap* heap = reinterpret_cast<Isolate*>(isolate)->heap();
-  ArrayBufferTracker* tracker = heap->array_buffer_tracker();
-  {
-    v8::HandleScope handle_scope(isolate);
-    {
-      v8::HandleScope tmp_scope(isolate);
-      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 100);
-      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-      CHECK(heap->InNewSpace(*buf));
-    }
-    {
-      v8::HandleScope tmp_scope(isolate);
-      Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, 50);
-      Handle<JSArrayBuffer> buf = v8::Utils::OpenHandle(*ab);
-      CHECK(heap->InNewSpace(*buf));
-      CHECK_EQ(tracker->retained_from_new_space(), 150u);
-      CHECK_EQ(tracker->retained_from_old_space(), 0u);
-      heap::GcAndSweep(heap, NEW_SPACE);
-      CHECK(heap->InNewSpace(*buf));
-      CHECK_EQ(tracker->retained_from_new_space(), 50u);
-      CHECK_EQ(tracker->retained_from_old_space(), 0u);
-    }
-    heap::GcAndSweep(heap, NEW_SPACE);
-    CHECK_EQ(tracker->retained_from_new_space(), 0u);
-    CHECK_EQ(tracker->retained_from_old_space(), 0u);
-  }
-}
-
 }  // namespace internal
 }  // namespace v8