// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/remembered-set.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/store-buffer.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/log.h"
#include "src/msan.h"
#include "src/objects-inl.h"
#include "src/type-feedback-vector-inl.h"

namespace v8 {
namespace internal {

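// Queue an object for promotion. Once the emergency stack exists, or when the
// in-place queue would grow past its limit, entries are added to the
// heap-allocated emergency stack instead.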
void PromotionQueue::insert(HeapObject* target, int32_t size,
                            bool was_marked_black) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size, was_marked_black));
    return;
  }

  if ((rear_ - 1) < limit_) {
    RelocateQueueHead();
    emergency_stack_->Add(Entry(target, size, was_marked_black));
    return;
  }

  struct Entry* entry = reinterpret_cast<struct Entry*>(--rear_);
  entry->obj_ = target;
  entry->size_ = size;
  entry->was_marked_black_ = was_marked_black;

// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


#define ROOT_ACCESSOR(type, name, camel_name) \
  type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* Heap::name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define ROOT_ACCESSOR(type, name, camel_name)                                 \
  void Heap::set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted.    */  \
    DCHECK(!deserialization_complete() ||                                     \
           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex));    \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value;                                 \
  }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR


template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());

  // Allocate string.
  HeapObject* result = nullptr;
  {
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  HeapObject* result = nullptr;
  {
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


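// Dispatch a raw allocation to the requested space. New-space requests larger
// than the regular heap object size limit are redirected to the large object
// space; failed old-generation allocations set old_gen_exhausted_.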
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationAlignment alignment) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && !always_allocate() &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  bool large_object = size_in_bytes > Page::kMaxRegularHeapObjectSize;
  HeapObject* object = nullptr;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    if (large_object) {
      space = LO_SPACE;
    } else {
      allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  // Here we only allocate in the old generation.
  if (OLD_SPACE == space) {
    if (large_object) {
      allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
    } else {
      allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
    }
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
    } else {
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    DCHECK(large_object);
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (MAP_SPACE == space) {
    allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
  } else {
    // NEW_SPACE is not allowed here.
    UNREACHABLE();
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }

  return allocation;
}


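// Notify the heap profiler about the new object. Under --verify-predictable
// the allocation is also folded into the allocations hash, and under
// --trace-allocation-stack-interval a stack trace is printed periodically.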
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;
    // Advance synthetic time by making a time request.
    MonotonicallyIncreasingTimeInMs();

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAlloctionsHash();
    }
  }

  if (FLAG_trace_allocation_stack_interval > 0) {
    if (!FLAG_verify_predictable) ++allocations_count_;
    if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
      isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
    }
  }
}


void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }
  if (target->IsSharedFunctionInfo()) {
    LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                         target->address()));
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;
    // Advance synthetic time by making a time request.
    MonotonicallyIncreasingTimeInMs();

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAlloctionsHash();
    }
  }
}


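// Fold an object's allocation space and offset within its page into the
// running allocations hash used by --verify-predictable.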
void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::RegisterExternalString(String* string) {
  external_string_table_.AddString(string);
}


void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}

bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}

bool Heap::InOldSpace(Object* object) { return old_space_->Contains(object); }

bool Heap::InNewSpaceSlow(Address address) {
  return new_space_.ContainsSlow(address);
}

bool Heap::InOldSpaceSlow(Address address) {
  return old_space_->ContainsSlow(address);
}

bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}

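// A new-space object is promoted if it lies below the age mark (it already
// survived a scavenge) or, under PROMOTE_MARKED, if it is already marked.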
template <PromotionMode promotion_mode>
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  Page* page = Page::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();

  if (promotion_mode == PROMOTE_MARKED) {
    MarkBit mark_bit = ObjectMarking::MarkBitFrom(old_address);
    if (!Marking::IsWhite(mark_bit)) {
      return true;
    }
  }

  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}

PromotionMode Heap::CurrentPromotionMode() {
  if (incremental_marking()->IsMarking()) {
    return PROMOTE_MARKED;
  } else {
    return DEFAULT_PROMOTION;
  }
}

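// Record an old-to-new pointer created by storing |o| into |object| at
// |offset| in the OLD_TO_NEW remembered set; stores that do not create an
// old-to-new reference are ignored.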
void Heap::RecordWrite(Object* object, int offset, Object* o) {
  if (!InNewSpace(o) || !object->IsHeapObject() || InNewSpace(object)) {
    return;
  }
  RememberedSet<OLD_TO_NEW>::Insert(
      Page::FromAddress(reinterpret_cast<Address>(object)),
      HeapObject::cast(object)->address() + offset);
}

void Heap::RecordWriteIntoCode(Code* host, RelocInfo* rinfo, Object* value) {
  if (InNewSpace(value)) {
    RecordWriteIntoCodeSlow(host, rinfo, value);
  }
}

void Heap::RecordFixedArrayElements(FixedArray* array, int offset, int length) {
  if (InNewSpace(array)) return;
  Page* page = Page::FromAddress(reinterpret_cast<Address>(array));
  for (int i = 0; i < length; i++) {
    if (!InNewSpace(array->get(offset + i))) continue;
    RememberedSet<OLD_TO_NEW>::Insert(
        page,
        reinterpret_cast<Address>(array->RawFieldOfElementAt(offset + i)));
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to the old space
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space or old space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == OLD_SPACE;
    case OLD_SPACE:
      return dst == src &&
             (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}

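// Copy byte_size bytes (a whole number of words) from src to dst.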
void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}

template <Heap::FindMementoMode mode>
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!Page::OnSamePage(object_address, last_memento_word_address)) {
    return nullptr;
  }
  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  Map* candidate_map = candidate->map();
  // This fast check may peek at an uninitialized word. However, the slow check
  // below (memento_address == top) ensures that this is safe. Mark the word as
  // initialized to silence MemorySanitizer warnings.
  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
  if (candidate_map != allocation_memento_map()) {
    return nullptr;
  }
  AllocationMemento* memento_candidate = AllocationMemento::cast(candidate);

  // Depending on what the memento is used for, we might need to perform
  // additional checks.
  Address top;
  switch (mode) {
    case Heap::kForGC:
      return memento_candidate;
    case Heap::kForRuntime:
      if (memento_candidate == nullptr) return nullptr;
      // Either the object is the last object in the new space, or there is
      // another object of at least word size (the header map word) following
      // it, so it suffices to compare ptr and top here.
      top = NewSpaceTop();
      DCHECK(memento_address == top ||
             memento_address + HeapObject::kHeaderSize <= top ||
             !Page::OnSamePage(memento_address, top - 1));
      if ((memento_address != top) && memento_candidate->IsValid()) {
        return memento_candidate;
      }
      return nullptr;
    default:
      UNREACHABLE();
  }
  UNREACHABLE();
  return nullptr;
}

template <Heap::UpdateAllocationSiteMode mode>
void Heap::UpdateAllocationSite(HeapObject* object,
                                base::HashMap* pretenuring_feedback) {
  DCHECK(InFromSpace(object));
  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;
  AllocationMemento* memento_candidate = FindAllocationMemento<kForGC>(object);
  if (memento_candidate == nullptr) return;

  if (mode == kGlobal) {
    DCHECK_EQ(pretenuring_feedback, global_pretenuring_feedback_);
    // Global pretenuring feedback is only entered in the scavenger, where
    // we are allowed to actually touch the allocation site.
    if (!memento_candidate->IsValid()) return;
    AllocationSite* site = memento_candidate->GetAllocationSite();
    DCHECK(!site->IsZombie());
    // For inserting in the global pretenuring storage we need to first
    // increment the memento found count on the allocation site.
    if (site->IncrementMementoFoundCount()) {
      global_pretenuring_feedback_->LookupOrInsert(site,
                                                   ObjectHash(site->address()));
    }
  } else {
    DCHECK_EQ(mode, kCached);
    DCHECK_NE(pretenuring_feedback, global_pretenuring_feedback_);
    // Entering cached feedback is used in the parallel case. We are not allowed
    // to dereference the allocation site and rather have to postpone all checks
    // till actually merging the data.
    Address key = memento_candidate->GetAllocationSiteUnchecked();
    base::HashMap::Entry* e =
        pretenuring_feedback->LookupOrInsert(key, ObjectHash(key));
    DCHECK(e != nullptr);
    (*bit_cast<intptr_t*>(&e->value))++;
  }
}


void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
  global_pretenuring_feedback_->Remove(
      site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


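// Recover the owning Isolate from the Heap address by subtracting the offset
// of Isolate::heap_, computed here via a dummy Isolate pointer at address 16.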
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
}


void Heap::ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void Heap::ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void Heap::ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(!obj->IsTheHole(heap_->isolate()));
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(!obj->IsTheHole(heap_->isolate()));
  }
#endif
}


void Heap::ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void Heap::ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}

// static
int DescriptorLookupCache::Hash(Object* source, Name* name) {
  DCHECK(name->IsUniqueName());
  // Uses only lower 32 bits if pointers are larger.
  uint32_t source_hash =
      static_cast<uint32_t>(reinterpret_cast<uintptr_t>(source)) >>
      kPointerSizeLog2;
  uint32_t name_hash = name->hash_field();
  return (source_hash ^ name_hash) % kLength;
}

int DescriptorLookupCache::Lookup(Map* source, Name* name) {
  int index = Hash(source, name);
  Key& key = keys_[index];
  if ((key.source == source) && (key.name == name)) return results_[index];
  return kAbsent;
}


void DescriptorLookupCache::Update(Map* source, Name* name, int result) {
  DCHECK(result != kAbsent);
  int index = Hash(source, name);
  Key& key = keys_[index];
  key.source = source;
  key.name = name;
  results_[index] = result;
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(Smi::FromInt(0));
}

Oddball* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(Smi::FromInt(0));
  set_instanceof_cache_function(Smi::FromInt(0));
}


uint32_t Heap::HashSeed() {
  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
  DCHECK(FLAG_randomize_hashes || seed == 0);
  return seed;
}


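// Script ids are positive Smis; wrap back to 1 once Smi::kMaxValue has been
// handed out.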
int Heap::NextScriptId() {
  int last_id = last_script_id()->value();
  if (last_id == Smi::kMaxValue) {
    last_id = 1;
  } else {
    last_id++;
  }
  set_last_script_id(Smi::FromInt(last_id));
  return last_id;
}

void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
  DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}

void Heap::SetInterpreterEntryReturnPCOffset(int pc_offset) {
  DCHECK(interpreter_entry_return_pc_offset() == Smi::FromInt(0));
  set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
}

int Heap::GetNextTemplateSerialNumber() {
  int next_serial_number = next_template_serial_number()->value() + 1;
  set_next_template_serial_number(Smi::FromInt(next_serial_number));
  return next_serial_number;
}

void Heap::SetSerializedTemplates(FixedArray* templates) {
  DCHECK_EQ(empty_fixed_array(), serialized_templates());
  set_serialized_templates(templates);
}

AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()) {
  heap_->always_allocate_scope_count_.Increment(1);
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_count_.Increment(-1);
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_INL_H_