Commit 8751debc authored by Dan Elphick, committed by Commit Bot

[cleanup] Remove unused parameters from heap/*

Removes lots of parameters that are never used (found using
-Wunused-parameter).
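
As a minimal, hypothetical sketch (not taken from this commit) of what
-Wunused-parameter reports and what the fix looks like:

  // example.cc -- compile with: g++ -Wall -Wunused-parameter -c example.cc
  // Before: 'scale' is accepted but never read, so the compiler warns:
  //   warning: unused parameter 'scale' [-Wunused-parameter]
  int AreaBefore(int width, int height, int scale) { return width * height; }

  // After: the dead parameter is dropped and every call site is updated,
  // which is the pattern applied throughout heap/* below.
  int AreaAfter(int width, int height) { return width * height; }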

Also wires up the pretenure parameter for Factory::NewFrameArray so it's
actually used.
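
A hypothetical call site (not part of this diff) illustrating why the wiring
matters; it assumes an Isolate* named 'isolate' is in scope:

  // Before this change, NewFrameArray dropped the pretenure flag, so the
  // backing FixedArray was always allocated as if NOT_TENURED; now a
  // TENURED request reaches NewFixedArrayWithHoles and old space is used.
  Handle<FrameArray> frames =
      isolate->factory()->NewFrameArray(number_of_frames, TENURED);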

Change-Id: I486e22ac0683afb84bba6a286947674254f93832
Reviewed-on: https://chromium-review.googlesource.com/1041687
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Commit-Queue: Dan Elphick <delphick@chromium.org>
Cr-Commit-Position: refs/heads/master@{#53077}
parent 35e3fda1
@@ -421,8 +421,8 @@ Handle<FeedbackMetadata> Factory::NewFeedbackMetadata(int slot_count) {
 Handle<FrameArray> Factory::NewFrameArray(int number_of_frames,
                                           PretenureFlag pretenure) {
   DCHECK_LE(0, number_of_frames);
-  Handle<FixedArray> result =
-      NewFixedArrayWithHoles(FrameArray::LengthFor(number_of_frames));
+  Handle<FixedArray> result = NewFixedArrayWithHoles(
+      FrameArray::LengthFor(number_of_frames), pretenure);
   result->set(FrameArray::kFrameCountIndex, Smi::kZero);
   return Handle<FrameArray>::cast(result);
 }
@@ -638,7 +638,7 @@ bool inline IsOneByte(Vector<const char> str, int chars) {
   return chars == str.length();
 }
 
-bool inline IsOneByte(Handle<String> str, int chars) {
+bool inline IsOneByte(Handle<String> str) {
   return str->IsOneByteRepresentation();
 }
@@ -789,7 +789,7 @@ Handle<String> Factory::NewTwoByteInternalizedString(Vector<const uc16> str,
 Handle<String> Factory::NewInternalizedStringImpl(Handle<String> string,
                                                   int chars,
                                                   uint32_t hash_field) {
-  if (IsOneByte(string, chars)) {
+  if (IsOneByte(string)) {
     return AllocateInternalizedStringImpl<true>(string, chars, hash_field);
   }
   return AllocateInternalizedStringImpl<false>(string, chars, hash_field);
@@ -1761,7 +1761,7 @@ bool Heap::PerformGarbageCollection(
   }
 
   if (!fast_promotion_mode_ || collector == MARK_COMPACTOR) {
-    ComputeFastPromotionMode(promotion_ratio_ + semi_space_copied_rate_);
+    ComputeFastPromotionMode();
   }
 
   isolate_->counters()->objs_since_last_young()->Set(0);
@@ -2238,7 +2238,7 @@ void Heap::Scavenge() {
   SetGCState(NOT_IN_GC);
 }
 
-void Heap::ComputeFastPromotionMode(double survival_rate) {
+void Heap::ComputeFastPromotionMode() {
   const size_t survived_in_new_space =
       survived_last_scavenge_ * 100 / new_space_->Capacity();
   fast_promotion_mode_ =
@@ -1976,7 +1976,7 @@ class Heap {
   bool InvokeNearHeapLimitCallback();
 
-  void ComputeFastPromotionMode(double survival_rate);
+  void ComputeFastPromotionMode();
 
   // Attempt to over-approximate the weak closure by marking object groups and
   // implicit references from global handles, but don't atomically complete
@@ -227,8 +227,7 @@ class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
 };
 
 void IncrementalMarking::SetOldSpacePageFlags(MemoryChunk* chunk,
-                                              bool is_marking,
-                                              bool is_compacting) {
+                                              bool is_marking) {
   if (is_marking) {
     chunk->SetFlag(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING);
     chunk->SetFlag(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING);
@@ -253,7 +252,7 @@ void IncrementalMarking::SetNewSpacePageFlags(MemoryChunk* chunk,
 void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
     PagedSpace* space) {
   for (Page* p : *space) {
-    SetOldSpacePageFlags(p, false, false);
+    SetOldSpacePageFlags(p, false);
   }
 }
@@ -273,14 +272,14 @@ void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
   DeactivateIncrementalWriteBarrierForSpace(heap_->new_space());
 
   for (LargePage* lop : *heap_->lo_space()) {
-    SetOldSpacePageFlags(lop, false, false);
+    SetOldSpacePageFlags(lop, false);
   }
 }
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier(PagedSpace* space) {
   for (Page* p : *space) {
-    SetOldSpacePageFlags(p, true, is_compacting_);
+    SetOldSpacePageFlags(p, true);
   }
 }
@@ -299,7 +298,7 @@ void IncrementalMarking::ActivateIncrementalWriteBarrier() {
   ActivateIncrementalWriteBarrier(heap_->new_space());
 
   for (LargePage* lop : *heap_->lo_space()) {
-    SetOldSpacePageFlags(lop, true, is_compacting_);
+    SetOldSpacePageFlags(lop, true);
   }
 }
@@ -227,7 +227,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
   void MarkBlackAndPush(HeapObject* obj);
 
   inline void SetOldSpacePageFlags(MemoryChunk* chunk) {
-    SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
+    SetOldSpacePageFlags(chunk, IsMarking());
   }
 
   inline void SetNewSpacePageFlags(Page* chunk) {
@@ -279,8 +279,7 @@ class V8_EXPORT_PRIVATE IncrementalMarking {
     IncrementalMarking& incremental_marking_;
   };
 
-  static void SetOldSpacePageFlags(MemoryChunk* chunk, bool is_marking,
-                                   bool is_compacting);
+  static void SetOldSpacePageFlags(MemoryChunk* chunk, bool is_marking);
   static void SetNewSpacePageFlags(MemoryChunk* chunk, bool is_marking);
@@ -2897,15 +2897,13 @@ class RememberedSetUpdatingItem : public UpdatingItem {
   }
 
   void UpdateTypedPointers() {
-    Isolate* isolate = heap_->isolate();
     if (chunk_->typed_slot_set<OLD_TO_NEW, AccessMode::NON_ATOMIC>() !=
         nullptr) {
       CHECK_NE(chunk_->owner(), heap_->map_space());
       RememberedSet<OLD_TO_NEW>::IterateTyped(
-          chunk_,
-          [isolate, this](SlotType slot_type, Address host_addr, Address slot) {
+          chunk_, [this](SlotType slot_type, Address host_addr, Address slot) {
             return UpdateTypedSlotHelper::UpdateTypedSlot(
-                isolate, slot_type, slot, [this](MaybeObject** slot) {
+                slot_type, slot, [this](MaybeObject** slot) {
                   return CheckAndUpdateOldToNewSlot(
                       reinterpret_cast<Address>(slot));
                 });
@@ -2916,13 +2914,11 @@ class RememberedSetUpdatingItem : public UpdatingItem {
             nullptr)) {
       CHECK_NE(chunk_->owner(), heap_->map_space());
       RememberedSet<OLD_TO_OLD>::IterateTyped(
-          chunk_,
-          [isolate](SlotType slot_type, Address host_addr, Address slot) {
+          chunk_, [](SlotType slot_type, Address host_addr, Address slot) {
            // Using UpdateStrongSlot is OK here, because there are no weak
            // typed slots.
            return UpdateTypedSlotHelper::UpdateTypedSlot(
-                isolate, slot_type, slot,
-                UpdateStrongSlot<AccessMode::NON_ATOMIC>);
+                slot_type, slot, UpdateStrongSlot<AccessMode::NON_ATOMIC>);
          });
     }
   }
@@ -4058,12 +4054,11 @@ class PageMarkingItem : public MarkingItem {
   }
 
   void MarkTypedPointers(YoungGenerationMarkingTask* task) {
-    Isolate* isolate = heap()->isolate();
     RememberedSet<OLD_TO_NEW>::IterateTyped(
-        chunk_, [this, isolate, task](SlotType slot_type, Address host_addr,
-                                      Address slot) {
+        chunk_,
+        [this, task](SlotType slot_type, Address host_addr, Address slot) {
          return UpdateTypedSlotHelper::UpdateTypedSlot(
-              isolate, slot_type, slot, [this, task](MaybeObject** slot) {
+              slot_type, slot, [this, task](MaybeObject** slot) {
                return CheckAndMarkObject(task,
                                          reinterpret_cast<Address>(slot));
              });
@@ -84,7 +84,7 @@ void MemoryReducer::NotifyTimer(const Event& event) {
         GarbageCollectionReason::kFinalizeMarkingViaTask);
   }
   // Re-schedule the timer.
-  ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
+  ScheduleTimer(state_.next_gc_start_ms - event.time_ms);
   if (FLAG_trace_gc_verbose) {
     heap()->isolate()->PrintWithTimestamp(
         "Memory reducer: waiting for %.f ms\n",
@@ -100,7 +100,7 @@ void MemoryReducer::NotifyMarkCompact(const Event& event) {
   state_ = Step(state_, event);
   if (old_action != kWait && state_.action == kWait) {
     // If we are transitioning to the WAIT state, start the timer.
-    ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
+    ScheduleTimer(state_.next_gc_start_ms - event.time_ms);
   }
   if (old_action == kRun) {
     if (FLAG_trace_gc_verbose) {
@@ -117,7 +117,7 @@ void MemoryReducer::NotifyPossibleGarbage(const Event& event) {
   state_ = Step(state_, event);
   if (old_action != kWait && state_.action == kWait) {
     // If we are transitioning to the WAIT state, start the timer.
-    ScheduleTimer(event.time_ms, state_.next_gc_start_ms - event.time_ms);
+    ScheduleTimer(state_.next_gc_start_ms - event.time_ms);
   }
 }
@@ -199,8 +199,7 @@ MemoryReducer::State MemoryReducer::Step(const State& state,
   UNREACHABLE();
 }
 
-void MemoryReducer::ScheduleTimer(double time_ms, double delay_ms) {
+void MemoryReducer::ScheduleTimer(double delay_ms) {
   DCHECK_LT(0, delay_ms);
   if (heap()->IsTearingDown()) return;
   // Leave some room for precision error in task scheduler.
@@ -123,7 +123,7 @@ class V8_EXPORT_PRIVATE MemoryReducer {
   // the incoming event.
   static State Step(const State& state, const Event& event);
   // Posts a timer task that will call NotifyTimer after the given delay.
-  void ScheduleTimer(double time_ms, double delay_ms);
+  void ScheduleTimer(double delay_ms);
   void TearDown();
   static const int kLongDelayMs;
   static const int kShortDelayMs;
@@ -328,8 +328,7 @@ class UpdateTypedSlotHelper {
   // Updates a typed slot using an untyped slot callback.
   // The callback accepts MaybeObject** and returns SlotCallbackResult.
   template <typename Callback>
-  static SlotCallbackResult UpdateTypedSlot(Isolate* isolate,
-                                            SlotType slot_type, Address addr,
+  static SlotCallbackResult UpdateTypedSlot(SlotType slot_type, Address addr,
                                             Callback callback) {
     switch (slot_type) {
       case CODE_TARGET_SLOT: {
@@ -125,7 +125,7 @@ void Scavenger::ScavengePage(MemoryChunk* page) {
   RememberedSet<OLD_TO_NEW>::IterateTyped(
       page, [this](SlotType type, Address host_addr, Address addr) {
         return UpdateTypedSlotHelper::UpdateTypedSlot(
-            heap_->isolate(), type, addr, [this](MaybeObject** addr) {
+            type, addr, [this](MaybeObject** addr) {
              return CheckAndScavengeObject(heap(),
                                            reinterpret_cast<Address>(addr));
            });
@@ -721,7 +721,7 @@ Page* SemiSpace::InitializePage(MemoryChunk* chunk, Executability executable) {
 }
 
 LargePage* LargePage::Initialize(Heap* heap, MemoryChunk* chunk,
-                                 Executability executable, Space* owner) {
+                                 Executability executable) {
   if (executable && chunk->size() > LargePage::kMaxCodePageSize) {
     STATIC_ASSERT(LargePage::kMaxCodePageSize <= TypedSlotSet::kMaxOffset);
     FATAL("Code page is too large.");
@@ -1167,7 +1167,7 @@ LargePage* MemoryAllocator::AllocateLargePage(size_t size,
                                               Executability executable) {
   MemoryChunk* chunk = AllocateChunk(size, size, executable, owner);
   if (chunk == nullptr) return nullptr;
-  return LargePage::Initialize(isolate_->heap(), chunk, executable, owner);
+  return LargePage::Initialize(isolate_->heap(), chunk, executable);
 }
 
 template <typename SpaceType>
@@ -892,7 +892,7 @@ class LargePage : public MemoryChunk {
  private:
   static LargePage* Initialize(Heap* heap, MemoryChunk* chunk,
-                               Executability executable, Space* owner);
+                               Executability executable);
 
   friend class MemoryAllocator;
 };