// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_SCAVENGER_INL_H_
#define V8_HEAP_SCAVENGER_INL_H_

#include "src/heap/scavenger.h"

#include "src/heap/incremental-marking-inl.h"
#include "src/heap/local-allocator-inl.h"
#include "src/objects-inl.h"
#include "src/objects/map.h"
#include "src/objects/slots-inl.h"

namespace v8 {
namespace internal {

void Scavenger::PromotionList::View::PushRegularObject(HeapObject object,
                                                       int size) {
  promotion_list_->PushRegularObject(task_id_, object, size);
}

void Scavenger::PromotionList::View::PushLargeObject(HeapObject object, Map map,
                                                     int size) {
  promotion_list_->PushLargeObject(task_id_, object, map, size);
}

bool Scavenger::PromotionList::View::IsEmpty() {
  return promotion_list_->IsEmpty();
}

size_t Scavenger::PromotionList::View::LocalPushSegmentSize() {
  return promotion_list_->LocalPushSegmentSize(task_id_);
}

bool Scavenger::PromotionList::View::Pop(struct PromotionListEntry* entry) {
  return promotion_list_->Pop(task_id_, entry);
}

bool Scavenger::PromotionList::View::IsGlobalPoolEmpty() {
  return promotion_list_->IsGlobalPoolEmpty();
}

bool Scavenger::PromotionList::View::ShouldEagerlyProcessPromotionList() {
  return promotion_list_->ShouldEagerlyProcessPromotionList(task_id_);
}

void Scavenger::PromotionList::PushRegularObject(int task_id, HeapObject object,
                                                 int size) {
  regular_object_promotion_list_.Push(task_id, ObjectAndSize(object, size));
}

void Scavenger::PromotionList::PushLargeObject(int task_id, HeapObject object,
                                               Map map, int size) {
  large_object_promotion_list_.Push(task_id, {object, map, size});
}

bool Scavenger::PromotionList::IsEmpty() {
  return regular_object_promotion_list_.IsEmpty() &&
         large_object_promotion_list_.IsEmpty();
}

size_t Scavenger::PromotionList::LocalPushSegmentSize(int task_id) {
  return regular_object_promotion_list_.LocalPushSegmentSize(task_id) +
         large_object_promotion_list_.LocalPushSegmentSize(task_id);
}

bool Scavenger::PromotionList::Pop(int task_id,
                                   struct PromotionListEntry* entry) {
  ObjectAndSize regular_object;
  if (regular_object_promotion_list_.Pop(task_id, &regular_object)) {
    entry->heap_object = regular_object.first;
    entry->size = regular_object.second;
    entry->map = entry->heap_object->map();
    return true;
  }
  return large_object_promotion_list_.Pop(task_id, entry);
}

bool Scavenger::PromotionList::IsGlobalPoolEmpty() {
  return regular_object_promotion_list_.IsGlobalPoolEmpty() &&
         large_object_promotion_list_.IsGlobalPoolEmpty();
}

bool Scavenger::PromotionList::ShouldEagerlyProcessPromotionList(int task_id) {
  // Threshold at which processing of the promotion list is prioritized. Right
  // now we only consider the regular object list.
  const int kProcessPromotionListThreshold =
      kRegularObjectPromotionListSegmentSize / 2;
  return LocalPushSegmentSize(task_id) < kProcessPromotionListThreshold;
}

void Scavenger::PageMemoryFence(MaybeObject object) {
#ifdef THREAD_SANITIZER
  // Perform a dummy acquire load to tell TSAN that there is no data race
  // with page initialization.
  HeapObject heap_object;
  if (object->GetHeapObject(&heap_object)) {
    MemoryChunk* chunk = MemoryChunk::FromAddress(heap_object->address());
    CHECK_NOT_NULL(chunk->synchronized_heap());
  }
#endif
}

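// Copies |source| into |target| and publishes a forwarding pointer in the
// source's map slot via a release compare-and-swap. Returns false when another
// task won the race and migrated the object first.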
bool Scavenger::MigrateObject(Map map, HeapObject source, HeapObject target,
                              int size) {
  // Copy the content of source to target.
  target->set_map_word(MapWord::FromMap(map));
  heap()->CopyBlock(target->address() + kTaggedSize,
                    source->address() + kTaggedSize, size - kTaggedSize);

  Object old = source->map_slot().Release_CompareAndSwap(
      map, MapWord::FromForwardingAddress(target).ToMap());
  if (old != map) {
    // Other task migrated the object.
    return false;
  }

  if (V8_UNLIKELY(is_logging_)) {
    heap()->OnMoveEvent(target, source, size);
  }

  if (is_incremental_marking_) {
    heap()->incremental_marking()->TransferColor(source, target);
  }
  heap()->UpdateAllocationSite(map, source, &local_pretenuring_feedback_);
  return true;
}

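// Attempts to copy |object| into new space. If another task forwards the
// object first, the freshly allocated memory is handed back to the allocator
// and the slot is updated with the winner's forwarding address.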
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::SemiSpaceCopyObject(
    Map map, THeapObjectSlot slot, HeapObject object, int object_size,
    ObjectFields object_fields) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(heap()->AllowedToBeMigrated(object, NEW_SPACE));
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(NEW_SPACE, object_size, alignment);

  HeapObject target;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      allocator_.FreeLast(NEW_SPACE, target, object_size);
      MapWord map_word = object->synchronized_map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      DCHECK(!Heap::InFromPage(*slot));
      return Heap::InToPage(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    HeapObjectReference::Update(slot, target);
    if (object_fields == ObjectFields::kMaybePointers) {
      copied_list_.Push(ObjectAndSize(target, object_size));
    }
    copied_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_YOUNG_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}

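// Attempts to move |object| directly into old space. On success the promoted
// object is pushed onto the promotion list (if it may contain pointers) so
// that its fields are processed later.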
template <typename THeapObjectSlot>
CopyAndForwardResult Scavenger::PromoteObject(Map map, THeapObjectSlot slot,
                                              HeapObject object,
                                              int object_size,
                                              ObjectFields object_fields) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  AllocationAlignment alignment = HeapObject::RequiredAlignment(map);
  AllocationResult allocation =
      allocator_.Allocate(OLD_SPACE, object_size, alignment);

  HeapObject target;
  if (allocation.To(&target)) {
    DCHECK(heap()->incremental_marking()->non_atomic_marking_state()->IsWhite(
        target));
    const bool self_success = MigrateObject(map, object, target, object_size);
    if (!self_success) {
      allocator_.FreeLast(OLD_SPACE, target, object_size);
      MapWord map_word = object->synchronized_map_word();
      HeapObjectReference::Update(slot, map_word.ToForwardingAddress());
      DCHECK(!Heap::InFromPage(*slot));
      return Heap::InToPage(*slot)
                 ? CopyAndForwardResult::SUCCESS_YOUNG_GENERATION
                 : CopyAndForwardResult::SUCCESS_OLD_GENERATION;
    }
    HeapObjectReference::Update(slot, target);
    if (object_fields == ObjectFields::kMaybePointers) {
      promotion_list_.PushRegularObject(target, object_size);
    }
    promoted_size_ += object_size;
    return CopyAndForwardResult::SUCCESS_OLD_GENERATION;
  }
  return CopyAndForwardResult::FAILURE;
}

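// Maps a successful copy result to whether the scanned slot must be kept in
// the remembered set (i.e. the target is still in the young generation).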
SlotCallbackResult Scavenger::RememberedSetEntryNeeded(
    CopyAndForwardResult result) {
  DCHECK_NE(CopyAndForwardResult::FAILURE, result);
  return result == CopyAndForwardResult::SUCCESS_YOUNG_GENERATION ? KEEP_SLOT
                                                                  : REMOVE_SLOT;
}

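// Young large objects are not copied. If |object| lives in the new large
// object space, its map slot is CASed to a forwarding pointer to itself and
// the object is recorded in surviving_new_large_objects_. Returns true if the
// object was handled here.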
bool Scavenger::HandleLargeObject(Map map, HeapObject object, int object_size,
                                  ObjectFields object_fields) {
  // TODO(hpayer): Make this check size based, i.e.
  // object_size > kMaxRegularHeapObjectSize
  if (V8_UNLIKELY(
          FLAG_young_generation_large_objects &&
          MemoryChunk::FromHeapObject(object)->InNewLargeObjectSpace())) {
    DCHECK_EQ(NEW_LO_SPACE,
              MemoryChunk::FromHeapObject(object)->owner()->identity());
    if (object->map_slot().Release_CompareAndSwap(
            map, MapWord::FromForwardingAddress(object).ToMap()) == map) {
      surviving_new_large_objects_.insert({object, map});
      promoted_size_ += object_size;
      if (object_fields == ObjectFields::kMaybePointers) {
        promotion_list_.PushLargeObject(object, map, object_size);
      }
    }
    return true;
  }
  return false;
}

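// Default evacuation path: large objects are handled in place, then a
// semi-space copy is attempted (unless the object should be promoted),
// followed by promotion to old space, and finally a retry of the semi-space
// copy. If everything fails, the process runs out of memory.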
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObjectDefault(
    Map map, THeapObjectSlot slot, HeapObject object, int object_size,
    ObjectFields object_fields) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  SLOW_DCHECK(object->SizeFromMap(map) == object_size);
  CopyAndForwardResult result;

  if (HandleLargeObject(map, object, object_size, object_fields)) {
    return KEEP_SLOT;
  }

  SLOW_DCHECK(static_cast<size_t>(object_size) <=
              MemoryChunkLayout::AllocatableMemoryInDataPage());

  if (!heap()->ShouldBePromoted(object->address())) {
    // A semi-space copy may fail due to fragmentation. In that case, we
    // try to promote the object.
    result = SemiSpaceCopyObject(map, slot, object, object_size, object_fields);
    if (result != CopyAndForwardResult::FAILURE) {
      return RememberedSetEntryNeeded(result);
    }
  }

  // We may want to promote this object if the object was already semi-space
  // copied in a previous young generation GC or if the semi-space copy above
  // failed.
  result = PromoteObject(map, slot, object, object_size, object_fields);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  // If promotion failed, we try to copy the object to the other semi-space.
  result = SemiSpaceCopyObject(map, slot, object, object_size, object_fields);
  if (result != CopyAndForwardResult::FAILURE) {
    return RememberedSetEntryNeeded(result);
  }

  heap()->FatalProcessOutOfMemory("Scavenger: semi-space copy");
  UNREACHABLE();
}

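// When incremental marking is off, a ThinString is not copied; the slot is
// redirected straight to the actual (internalized) string, which lives in old
// space. Otherwise the ThinString is evacuated like any other object.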
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateThinString(Map map, THeapObjectSlot slot,
                                                 ThinString object,
                                                 int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  if (!is_incremental_marking_) {
    // The ThinString should die after Scavenge, so avoid writing the proper
    // forwarding pointer and instead just signal the actual object as forwarded
    // reference.
    String actual = object->actual();
    // ThinStrings always refer to internalized strings, which are always in old
    // space.
    DCHECK(!Heap::InYoungGeneration(actual));
    HeapObjectReference::Update(slot, actual);
    return REMOVE_SLOT;
  }

  DCHECK_EQ(ObjectFields::kMaybePointers,
            Map::ObjectFieldsFrom(map->visitor_id()));
  return EvacuateObjectDefault(map, slot, object, object_size,
                               ObjectFields::kMaybePointers);
}

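// A ConsString whose second part is the empty string is shortcut: the slot is
// redirected to the first part, which is then scavenged if it is still in the
// young generation. Otherwise the ConsString is evacuated normally.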
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateShortcutCandidate(Map map,
                                                        THeapObjectSlot slot,
                                                        ConsString object,
                                                        int object_size) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(IsShortcutCandidate(map->instance_type()));
  if (!is_incremental_marking_ &&
      object->unchecked_second() == ReadOnlyRoots(heap()).empty_string()) {
    HeapObject first = HeapObject::cast(object->unchecked_first());

    HeapObjectReference::Update(slot, first);

    if (!Heap::InYoungGeneration(first)) {
      object->map_slot().Release_Store(
          MapWord::FromForwardingAddress(first).ToMap());
      return REMOVE_SLOT;
    }

    MapWord first_word = first->synchronized_map_word();
    if (first_word.IsForwardingAddress()) {
      HeapObject target = first_word.ToForwardingAddress();

      HeapObjectReference::Update(slot, target);
      object->map_slot().Release_Store(
          MapWord::FromForwardingAddress(target).ToMap());
      return Heap::InYoungGeneration(target) ? KEEP_SLOT : REMOVE_SLOT;
    }
    Map map = first_word.ToMap();
    SlotCallbackResult result =
        EvacuateObjectDefault(map, slot, first, first->SizeFromMap(map),
                              Map::ObjectFieldsFrom(map->visitor_id()));
    object->map_slot().Release_Store(
        MapWord::FromForwardingAddress(slot.ToHeapObject()).ToMap());
    return result;
  }
  DCHECK_EQ(ObjectFields::kMaybePointers,
            Map::ObjectFieldsFrom(map->visitor_id()));
  return EvacuateObjectDefault(map, slot, object, object_size,
                               ObjectFields::kMaybePointers);
}

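// Dispatches on the map's visitor id so that thin strings and shortcut
// candidates receive their specialized handling; everything else takes the
// default path.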
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::EvacuateObject(THeapObjectSlot slot, Map map,
                                             HeapObject source) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  SLOW_DCHECK(Heap::InFromPage(source));
  SLOW_DCHECK(!MapWord::FromMap(map).IsForwardingAddress());
  int size = source->SizeFromMap(map);
  // Cannot use ::cast() below because that would add checks in debug mode
  // that require re-reading the map.
  VisitorId visitor_id = map->visitor_id();
  switch (visitor_id) {
    case kVisitThinString:
      // At the moment we don't allow weak pointers to thin strings.
      DCHECK(!(*slot)->IsWeak());
      return EvacuateThinString(map, slot, ThinString::unchecked_cast(source),
                                size);
    case kVisitShortcutCandidate:
      DCHECK(!(*slot)->IsWeak());
      // At the moment we don't allow weak pointers to cons strings.
      return EvacuateShortcutCandidate(
          map, slot, ConsString::unchecked_cast(source), size);
    default:
      return EvacuateObjectDefault(map, slot, source, size,
                                   Map::ObjectFieldsFrom(visitor_id));
  }
}

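// Scavenges the object referenced by slot |p|. If the object already carries a
// forwarding address, only the slot is updated; otherwise the object is
// evacuated. Returns KEEP_SLOT when the updated slot still points into the
// young generation.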
template <typename THeapObjectSlot>
SlotCallbackResult Scavenger::ScavengeObject(THeapObjectSlot p,
                                             HeapObject object) {
  static_assert(std::is_same<THeapObjectSlot, FullHeapObjectSlot>::value ||
                    std::is_same<THeapObjectSlot, HeapObjectSlot>::value,
                "Only FullHeapObjectSlot and HeapObjectSlot are expected here");
  DCHECK(Heap::InFromPage(object));

  // Synchronized load that consumes the publishing CAS of MigrateObject.
  MapWord first_word = object->synchronized_map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject dest = first_word.ToForwardingAddress();
    HeapObjectReference::Update(p, dest);
    DCHECK_IMPLIES(Heap::InYoungGeneration(dest),
                   Heap::InToPage(dest) || Heap::IsLargeObject(dest));

    return Heap::InYoungGeneration(dest) ? KEEP_SLOT : REMOVE_SLOT;
  }

  Map map = first_word.ToMap();
  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK_NE(ReadOnlyRoots(heap()).allocation_memento_map(), map);
  // Call the slow part of scavenge object.
  return EvacuateObject(p, map, object);
}

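// Processes a slot from the remembered set: scavenges the referenced object if
// it is still in from-space and reports whether the slot has to stay recorded.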
template <typename TSlot>
SlotCallbackResult Scavenger::CheckAndScavengeObject(Heap* heap, TSlot slot) {
  static_assert(
      std::is_same<TSlot, FullMaybeObjectSlot>::value ||
          std::is_same<TSlot, MaybeObjectSlot>::value,
      "Only FullMaybeObjectSlot and MaybeObjectSlot are expected here");
  using THeapObjectSlot = typename TSlot::THeapObjectSlot;
  MaybeObject object = *slot;
  if (Heap::InFromPage(object)) {
    HeapObject heap_object = object->GetHeapObject();

    SlotCallbackResult result =
        ScavengeObject(THeapObjectSlot(slot), heap_object);
    DCHECK_IMPLIES(result == REMOVE_SLOT,
                   !heap->InYoungGeneration((*slot)->GetHeapObject()));
    return result;
  } else if (Heap::InToPage(object)) {
    // Already updated slot. This can happen when processing of the work list
    // is interleaved with processing roots.
    return KEEP_SLOT;
  }
  // Slots can point to "to" space if the slot has been recorded multiple
  // times in the remembered set. We remove the redundant slot now.
  return REMOVE_SLOT;
}

void ScavengeVisitor::VisitPointers(HeapObject host, ObjectSlot start,
                                    ObjectSlot end) {
  return VisitPointersImpl(host, start, end);
}

void ScavengeVisitor::VisitPointers(HeapObject host, MaybeObjectSlot start,
                                    MaybeObjectSlot end) {
  return VisitPointersImpl(host, start, end);
}

void ScavengeVisitor::VisitCodeTarget(Code host, RelocInfo* rinfo) {
  Code target = Code::GetCodeFromTargetAddress(rinfo->target_address());
#ifdef DEBUG
  Code old_target = target;
#endif
  FullObjectSlot slot(&target);
  VisitHeapObjectImpl(slot, target);
  // Code objects are never in new-space, so the slot contents must not change.
  DCHECK_EQ(old_target, target);
}

void ScavengeVisitor::VisitEmbeddedPointer(Code host, RelocInfo* rinfo) {
  HeapObject heap_object = rinfo->target_object();
#ifdef DEBUG
  HeapObject old_heap_object = heap_object;
#endif
  FullObjectSlot slot(&heap_object);
  VisitHeapObjectImpl(slot, heap_object);
  // We don't embed new-space objects into code, so the slot contents must not
  // change.
  DCHECK_EQ(old_heap_object, heap_object);
}

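// Only young-generation targets are scavenged; pointers into the old
// generation are ignored by the visitor.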
template <typename TSlot>
void ScavengeVisitor::VisitHeapObjectImpl(TSlot slot, HeapObject heap_object) {
  if (Heap::InYoungGeneration(heap_object)) {
    scavenger_->ScavengeObject(HeapObjectSlot(slot), heap_object);
  }
}

template <typename TSlot>
void ScavengeVisitor::VisitPointersImpl(HeapObject host, TSlot start,
                                        TSlot end) {
  for (TSlot slot = start; slot < end; ++slot) {
    typename TSlot::TObject object = *slot;
    HeapObject heap_object;
    // Treat weak references as strong.
    if (object.GetHeapObject(&heap_object)) {
      VisitHeapObjectImpl(slot, heap_object);
    }
  }
}

}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_SCAVENGER_INL_H_